diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/__main__.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/__main__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5bdbca2a46155d18ade98cf3ce58942f73b3af3d Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/__main__.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/afmLib.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/afmLib.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4b843c28e2c1da74a46b70cc52576c081b5fab46 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/afmLib.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/fontBuilder.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/fontBuilder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e0acb05eeeb1abac85fe792fadae96e0b32b8630 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/fontBuilder.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/help.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/help.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..136e1f5633c77b88eec5c175e8eb197e51bb7c40 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/help.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/ttx.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/ttx.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..446bd4de077dc0eef5443973668f7bb9634cb904 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/ttx.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/unicode.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/unicode.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..38f75784bfee55591ba97c714cef2016b7e39101 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/__pycache__/unicode.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/__pycache__/__init__.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9d2ad60c76fb659c41c4a70767f8d9e79f8bb50 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/__pycache__/__init__.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/designspaceLib/statNames.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/designspaceLib/statNames.py new file mode 100644 index 0000000000000000000000000000000000000000..1474e5fcf560343feb759680c121f25802c05bf6 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/designspaceLib/statNames.py @@ -0,0 +1,253 @@ +"""Compute name information for a given location in user-space coordinates +using STAT data. This can be used to fill-in automatically the names of an +instance: + +.. 
code:: python + + instance = doc.instances[0] + names = getStatNames(doc, instance.getFullUserLocation(doc)) + print(names.styleNames) +""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Dict, Optional, Tuple, Union +import logging + +from fontTools.designspaceLib import ( + AxisDescriptor, + AxisLabelDescriptor, + DesignSpaceDocument, + DesignSpaceDocumentError, + DiscreteAxisDescriptor, + SimpleLocationDict, + SourceDescriptor, +) + +LOGGER = logging.getLogger(__name__) + +# TODO(Python 3.8): use Literal +# RibbiStyleName = Union[Literal["regular"], Literal["bold"], Literal["italic"], Literal["bold italic"]] +RibbiStyle = str +BOLD_ITALIC_TO_RIBBI_STYLE = { + (False, False): "regular", + (False, True): "italic", + (True, False): "bold", + (True, True): "bold italic", +} + + +@dataclass +class StatNames: + """Name data generated from the STAT table information.""" + + familyNames: Dict[str, str] + styleNames: Dict[str, str] + postScriptFontName: Optional[str] + styleMapFamilyNames: Dict[str, str] + styleMapStyleName: Optional[RibbiStyle] + + +def getStatNames( + doc: DesignSpaceDocument, userLocation: SimpleLocationDict +) -> StatNames: + """Compute the family, style, PostScript names of the given ``userLocation`` + using the document's STAT information. + + Also computes localizations. + + If not enough STAT data is available for a given name, either its dict of + localized names will be empty (family and style names), or the name will be + None (PostScript name). + + .. versionadded:: 5.0 + """ + familyNames: Dict[str, str] = {} + defaultSource: Optional[SourceDescriptor] = doc.findDefault() + if defaultSource is None: + LOGGER.warning("Cannot determine default source to look up family name.") + elif defaultSource.familyName is None: + LOGGER.warning( + "Cannot look up family name, assign the 'familyname' attribute to the default source." 
+ ) + else: + familyNames = { + "en": defaultSource.familyName, + **defaultSource.localisedFamilyName, + } + + styleNames: Dict[str, str] = {} + # If a free-standing label matches the location, use it for name generation. + label = doc.labelForUserLocation(userLocation) + if label is not None: + styleNames = {"en": label.name, **label.labelNames} + # Otherwise, scour the axis labels for matches. + else: + # Gather all languages in which at least one translation is provided + # Then build names for all these languages, but fallback to English + # whenever a translation is missing. + labels = _getAxisLabelsForUserLocation(doc.axes, userLocation) + if labels: + languages = set( + language for label in labels for language in label.labelNames + ) + languages.add("en") + for language in languages: + styleName = " ".join( + label.labelNames.get(language, label.defaultName) + for label in labels + if not label.elidable + ) + if not styleName and doc.elidedFallbackName is not None: + styleName = doc.elidedFallbackName + styleNames[language] = styleName + + if "en" not in familyNames or "en" not in styleNames: + # Not enough information to compute PS names of styleMap names + return StatNames( + familyNames=familyNames, + styleNames=styleNames, + postScriptFontName=None, + styleMapFamilyNames={}, + styleMapStyleName=None, + ) + + postScriptFontName = f"{familyNames['en']}-{styleNames['en']}".replace(" ", "") + + styleMapStyleName, regularUserLocation = _getRibbiStyle(doc, userLocation) + + styleNamesForStyleMap = styleNames + if regularUserLocation != userLocation: + regularStatNames = getStatNames(doc, regularUserLocation) + styleNamesForStyleMap = regularStatNames.styleNames + + styleMapFamilyNames = {} + for language in set(familyNames).union(styleNames.keys()): + familyName = familyNames.get(language, familyNames["en"]) + styleName = styleNamesForStyleMap.get(language, styleNamesForStyleMap["en"]) + styleMapFamilyNames[language] = (familyName + " " + styleName).strip() + 
+ return StatNames( + familyNames=familyNames, + styleNames=styleNames, + postScriptFontName=postScriptFontName, + styleMapFamilyNames=styleMapFamilyNames, + styleMapStyleName=styleMapStyleName, + ) + + +def _getSortedAxisLabels( + axes: list[Union[AxisDescriptor, DiscreteAxisDescriptor]], +) -> Dict[str, list[AxisLabelDescriptor]]: + """Returns axis labels sorted by their ordering, with unordered ones appended as + they are listed.""" + + # First, get the axis labels with explicit ordering... + sortedAxes = sorted( + (axis for axis in axes if axis.axisOrdering is not None), + key=lambda a: a.axisOrdering, + ) + sortedLabels: Dict[str, list[AxisLabelDescriptor]] = { + axis.name: axis.axisLabels for axis in sortedAxes + } + + # ... then append the others in the order they appear. + # NOTE: This relies on Python 3.7+ dict's preserved insertion order. + for axis in axes: + if axis.axisOrdering is None: + sortedLabels[axis.name] = axis.axisLabels + + return sortedLabels + + +def _getAxisLabelsForUserLocation( + axes: list[Union[AxisDescriptor, DiscreteAxisDescriptor]], + userLocation: SimpleLocationDict, +) -> list[AxisLabelDescriptor]: + labels: list[AxisLabelDescriptor] = [] + + allAxisLabels = _getSortedAxisLabels(axes) + if allAxisLabels.keys() != userLocation.keys(): + LOGGER.warning( + f"Mismatch between user location '{userLocation.keys()}' and available " + f"labels for '{allAxisLabels.keys()}'." + ) + + for axisName, axisLabels in allAxisLabels.items(): + userValue = userLocation[axisName] + label: Optional[AxisLabelDescriptor] = next( + ( + l + for l in axisLabels + if l.userValue == userValue + or ( + l.userMinimum is not None + and l.userMaximum is not None + and l.userMinimum <= userValue <= l.userMaximum + ) + ), + None, + ) + if label is None: + LOGGER.debug( + f"Document needs a label for axis '{axisName}', user value '{userValue}'." 
+ ) + else: + labels.append(label) + + return labels + + +def _getRibbiStyle( + self: DesignSpaceDocument, userLocation: SimpleLocationDict +) -> Tuple[RibbiStyle, SimpleLocationDict]: + """Compute the RIBBI style name of the given user location, + return the location of the matching Regular in the RIBBI group. + + .. versionadded:: 5.0 + """ + regularUserLocation = {} + axes_by_tag = {axis.tag: axis for axis in self.axes} + + bold: bool = False + italic: bool = False + + axis = axes_by_tag.get("wght") + if axis is not None: + for regular_label in axis.axisLabels: + if ( + regular_label.linkedUserValue == userLocation[axis.name] + # In the "recursive" case where both the Regular has + # linkedUserValue pointing the Bold, and the Bold has + # linkedUserValue pointing to the Regular, only consider the + # first case: Regular (e.g. 400) has linkedUserValue pointing to + # Bold (e.g. 700, higher than Regular) + and regular_label.userValue < regular_label.linkedUserValue + ): + regularUserLocation[axis.name] = regular_label.userValue + bold = True + break + + axis = axes_by_tag.get("ital") or axes_by_tag.get("slnt") + if axis is not None: + for upright_label in axis.axisLabels: + if ( + upright_label.linkedUserValue == userLocation[axis.name] + # In the "recursive" case where both the Upright has + # linkedUserValue pointing the Italic, and the Italic has + # linkedUserValue pointing to the Upright, only consider the + # first case: Upright (e.g. ital=0, slant=0) has + # linkedUserValue pointing to Italic (e.g ital=1, slant=-12 or + # slant=12 for backwards italics, in any case higher than + # Upright in absolute value, hence the abs() below. 
+ and abs(upright_label.userValue) < abs(upright_label.linkedUserValue) + ): + regularUserLocation[axis.name] = upright_label.userValue + italic = True + break + + return BOLD_ITALIC_TO_RIBBI_STYLE[bold, italic], { + **userLocation, + **regularUserLocation, + } diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/designspaceLib/types.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/designspaceLib/types.py new file mode 100644 index 0000000000000000000000000000000000000000..80ba9d6d7b44f58773f42107d672c13651c166a9 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/designspaceLib/types.py @@ -0,0 +1,147 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import Dict, List, Optional, Union, cast + +from fontTools.designspaceLib import ( + AxisDescriptor, + DesignSpaceDocument, + DesignSpaceDocumentError, + RangeAxisSubsetDescriptor, + SimpleLocationDict, + ValueAxisSubsetDescriptor, + VariableFontDescriptor, +) + + +def clamp(value, minimum, maximum): + return min(max(value, minimum), maximum) + + +@dataclass +class Range: + minimum: float + """Inclusive minimum of the range.""" + maximum: float + """Inclusive maximum of the range.""" + default: float = 0 + """Default value""" + + def __post_init__(self): + self.minimum, self.maximum = sorted((self.minimum, self.maximum)) + self.default = clamp(self.default, self.minimum, self.maximum) + + def __contains__(self, value: Union[float, Range]) -> bool: + if isinstance(value, Range): + return self.minimum <= value.minimum and value.maximum <= self.maximum + return self.minimum <= value <= self.maximum + + def intersection(self, other: Range) -> Optional[Range]: + if self.maximum < other.minimum or self.minimum > other.maximum: + return None + else: + return Range( + max(self.minimum, other.minimum), + min(self.maximum, other.maximum), + self.default, # We don't care about the default in this use-case + ) + + +# A region selection is either a 
range or a single value, as a Designspace v5 +# axis-subset element only allows a single discrete value or a range for a +# variable-font element. +Region = Dict[str, Union[Range, float]] + +# A conditionset is a set of named ranges. +ConditionSet = Dict[str, Range] + +# A rule is a list of conditionsets where any has to be relevant for the whole rule to be relevant. +Rule = List[ConditionSet] +Rules = Dict[str, Rule] + + +def locationInRegion(location: SimpleLocationDict, region: Region) -> bool: + for name, value in location.items(): + if name not in region: + return False + regionValue = region[name] + if isinstance(regionValue, (float, int)): + if value != regionValue: + return False + else: + if value not in regionValue: + return False + return True + + +def regionInRegion(region: Region, superRegion: Region) -> bool: + for name, value in region.items(): + if not name in superRegion: + return False + superValue = superRegion[name] + if isinstance(superValue, (float, int)): + if value != superValue: + return False + else: + if value not in superValue: + return False + return True + + +def userRegionToDesignRegion(doc: DesignSpaceDocument, userRegion: Region) -> Region: + designRegion = {} + for name, value in userRegion.items(): + axis = doc.getAxis(name) + if axis is None: + raise DesignSpaceDocumentError( + f"Cannot find axis named '{name}' for region." 
+ ) + if isinstance(value, (float, int)): + designRegion[name] = axis.map_forward(value) + else: + designRegion[name] = Range( + axis.map_forward(value.minimum), + axis.map_forward(value.maximum), + axis.map_forward(value.default), + ) + return designRegion + + +def getVFUserRegion(doc: DesignSpaceDocument, vf: VariableFontDescriptor) -> Region: + vfUserRegion: Region = {} + # For each axis, 2 cases: + # - it has a range = it's an axis in the VF DS + # - it's a single location = use it to know which rules should apply in the VF + for axisSubset in vf.axisSubsets: + axis = doc.getAxis(axisSubset.name) + if axis is None: + raise DesignSpaceDocumentError( + f"Cannot find axis named '{axisSubset.name}' for variable font '{vf.name}'." + ) + if hasattr(axisSubset, "userMinimum"): + # Mypy doesn't support narrowing union types via hasattr() + # TODO(Python 3.10): use TypeGuard + # https://mypy.readthedocs.io/en/stable/type_narrowing.html + axisSubset = cast(RangeAxisSubsetDescriptor, axisSubset) + if not hasattr(axis, "minimum"): + raise DesignSpaceDocumentError( + f"Cannot select a range over '{axis.name}' for variable font '{vf.name}' " + "because it's a discrete axis, use only 'userValue' instead." + ) + axis = cast(AxisDescriptor, axis) + vfUserRegion[axis.name] = Range( + max(axisSubset.userMinimum, axis.minimum), + min(axisSubset.userMaximum, axis.maximum), + axisSubset.userDefault or axis.default, + ) + else: + axisSubset = cast(ValueAxisSubsetDescriptor, axisSubset) + vfUserRegion[axis.name] = axisSubset.userValue + # Any axis not mentioned explicitly has a single location = default value + for axis in doc.axes: + if axis.name not in vfUserRegion: + assert isinstance( + axis.default, (int, float) + ), f"Axis '{axis.name}' has no valid default value." 
+ vfUserRegion[axis.name] = axis.default + return vfUserRegion diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/location.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/location.py new file mode 100644 index 0000000000000000000000000000000000000000..50f761d2d2a13bd101a7db9c259fedc98eed52cf --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/location.py @@ -0,0 +1,12 @@ +from typing import NamedTuple + + +class FeatureLibLocation(NamedTuple): + """A location in a feature file""" + + file: str + line: int + column: int + + def __str__(self): + return f"{self.file}:{self.line}:{self.column}" diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/lookupDebugInfo.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/lookupDebugInfo.py new file mode 100644 index 0000000000000000000000000000000000000000..d4da7de0aed6b87dae6a1d4b417f1c6e099fe1e0 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/lookupDebugInfo.py @@ -0,0 +1,12 @@ +from typing import NamedTuple + +LOOKUP_DEBUG_INFO_KEY = "com.github.fonttools.feaLib" +LOOKUP_DEBUG_ENV_VAR = "FONTTOOLS_LOOKUP_DEBUGGING" + + +class LookupDebugInfo(NamedTuple): + """Information about where a lookup came from, to be embedded in a font""" + + location: str + name: str + feature: list diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__main__.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..ff632d49c54e678623a27998a9d51b7cf84df81f --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__main__.py @@ -0,0 +1,6 @@ +import sys +from fontTools.merge import main + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__pycache__/options.cpython-310.pyc 
b/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__pycache__/options.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..45ffe27473e366ff62fd8aa2dccb4daa01b3e9a4 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__pycache__/options.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__pycache__/util.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b73232707eb16592c315c51947278a0c045ea73c Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__pycache__/util.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/layout.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/layout.py new file mode 100644 index 0000000000000000000000000000000000000000..e1b504e6198fbee20070078c8391a4a922255889 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/layout.py @@ -0,0 +1,526 @@ +# Copyright 2013 Google, Inc. All Rights Reserved. +# +# Google Author(s): Behdad Esfahbod, Roozbeh Pournader + +from fontTools import ttLib +from fontTools.ttLib.tables.DefaultTable import DefaultTable +from fontTools.ttLib.tables import otTables +from fontTools.merge.base import add_method, mergeObjects +from fontTools.merge.util import * +import logging + + +log = logging.getLogger("fontTools.merge") + + +def mergeLookupLists(lst): + # TODO Do smarter merge. 
+ return sumLists(lst) + + +def mergeFeatures(lst): + assert lst + self = otTables.Feature() + self.FeatureParams = None + self.LookupListIndex = mergeLookupLists( + [l.LookupListIndex for l in lst if l.LookupListIndex] + ) + self.LookupCount = len(self.LookupListIndex) + return self + + +def mergeFeatureLists(lst): + d = {} + for l in lst: + for f in l: + tag = f.FeatureTag + if tag not in d: + d[tag] = [] + d[tag].append(f.Feature) + ret = [] + for tag in sorted(d.keys()): + rec = otTables.FeatureRecord() + rec.FeatureTag = tag + rec.Feature = mergeFeatures(d[tag]) + ret.append(rec) + return ret + + +def mergeLangSyses(lst): + assert lst + + # TODO Support merging ReqFeatureIndex + assert all(l.ReqFeatureIndex == 0xFFFF for l in lst) + + self = otTables.LangSys() + self.LookupOrder = None + self.ReqFeatureIndex = 0xFFFF + self.FeatureIndex = mergeFeatureLists( + [l.FeatureIndex for l in lst if l.FeatureIndex] + ) + self.FeatureCount = len(self.FeatureIndex) + return self + + +def mergeScripts(lst): + assert lst + + if len(lst) == 1: + return lst[0] + langSyses = {} + for sr in lst: + for lsr in sr.LangSysRecord: + if lsr.LangSysTag not in langSyses: + langSyses[lsr.LangSysTag] = [] + langSyses[lsr.LangSysTag].append(lsr.LangSys) + lsrecords = [] + for tag, langSys_list in sorted(langSyses.items()): + lsr = otTables.LangSysRecord() + lsr.LangSys = mergeLangSyses(langSys_list) + lsr.LangSysTag = tag + lsrecords.append(lsr) + + self = otTables.Script() + self.LangSysRecord = lsrecords + self.LangSysCount = len(lsrecords) + dfltLangSyses = [s.DefaultLangSys for s in lst if s.DefaultLangSys] + if dfltLangSyses: + self.DefaultLangSys = mergeLangSyses(dfltLangSyses) + else: + self.DefaultLangSys = None + return self + + +def mergeScriptRecords(lst): + d = {} + for l in lst: + for s in l: + tag = s.ScriptTag + if tag not in d: + d[tag] = [] + d[tag].append(s.Script) + ret = [] + for tag in sorted(d.keys()): + rec = otTables.ScriptRecord() + rec.ScriptTag = tag + 
rec.Script = mergeScripts(d[tag]) + ret.append(rec) + return ret + + +otTables.ScriptList.mergeMap = { + "ScriptCount": lambda lst: None, # TODO + "ScriptRecord": mergeScriptRecords, +} +otTables.BaseScriptList.mergeMap = { + "BaseScriptCount": lambda lst: None, # TODO + # TODO: Merge duplicate entries + "BaseScriptRecord": lambda lst: sorted( + sumLists(lst), key=lambda s: s.BaseScriptTag + ), +} + +otTables.FeatureList.mergeMap = { + "FeatureCount": sum, + "FeatureRecord": lambda lst: sorted(sumLists(lst), key=lambda s: s.FeatureTag), +} + +otTables.LookupList.mergeMap = { + "LookupCount": sum, + "Lookup": sumLists, +} + +otTables.Coverage.mergeMap = { + "Format": min, + "glyphs": sumLists, +} + +otTables.ClassDef.mergeMap = { + "Format": min, + "classDefs": sumDicts, +} + +otTables.LigCaretList.mergeMap = { + "Coverage": mergeObjects, + "LigGlyphCount": sum, + "LigGlyph": sumLists, +} + +otTables.AttachList.mergeMap = { + "Coverage": mergeObjects, + "GlyphCount": sum, + "AttachPoint": sumLists, +} + +# XXX Renumber MarkFilterSets of lookups +otTables.MarkGlyphSetsDef.mergeMap = { + "MarkSetTableFormat": equal, + "MarkSetCount": sum, + "Coverage": sumLists, +} + +otTables.Axis.mergeMap = { + "*": mergeObjects, +} + +# XXX Fix BASE table merging +otTables.BaseTagList.mergeMap = { + "BaseTagCount": sum, + "BaselineTag": sumLists, +} + +otTables.GDEF.mergeMap = otTables.GSUB.mergeMap = otTables.GPOS.mergeMap = ( + otTables.BASE.mergeMap +) = otTables.JSTF.mergeMap = otTables.MATH.mergeMap = { + "*": mergeObjects, + "Version": max, +} + +ttLib.getTableClass("GDEF").mergeMap = ttLib.getTableClass("GSUB").mergeMap = ( + ttLib.getTableClass("GPOS").mergeMap +) = ttLib.getTableClass("BASE").mergeMap = ttLib.getTableClass( + "JSTF" +).mergeMap = ttLib.getTableClass( + "MATH" +).mergeMap = { + "tableTag": onlyExisting(equal), # XXX clean me up + "table": mergeObjects, +} + + +@add_method(ttLib.getTableClass("GSUB")) +def merge(self, m, tables): + assert len(tables) == 
len(m.duplicateGlyphsPerFont) + for i, (table, dups) in enumerate(zip(tables, m.duplicateGlyphsPerFont)): + if not dups: + continue + if table is None or table is NotImplemented: + log.warning( + "Have non-identical duplicates to resolve for '%s' but no GSUB. Are duplicates intended?: %s", + m.fonts[i]._merger__name, + dups, + ) + continue + + synthFeature = None + synthLookup = None + for script in table.table.ScriptList.ScriptRecord: + if script.ScriptTag == "DFLT": + continue # XXX + for langsys in [script.Script.DefaultLangSys] + [ + l.LangSys for l in script.Script.LangSysRecord + ]: + if langsys is None: + continue # XXX Create! + feature = [v for v in langsys.FeatureIndex if v.FeatureTag == "locl"] + assert len(feature) <= 1 + if feature: + feature = feature[0] + else: + if not synthFeature: + synthFeature = otTables.FeatureRecord() + synthFeature.FeatureTag = "locl" + f = synthFeature.Feature = otTables.Feature() + f.FeatureParams = None + f.LookupCount = 0 + f.LookupListIndex = [] + table.table.FeatureList.FeatureRecord.append(synthFeature) + table.table.FeatureList.FeatureCount += 1 + feature = synthFeature + langsys.FeatureIndex.append(feature) + langsys.FeatureIndex.sort(key=lambda v: v.FeatureTag) + + if not synthLookup: + subtable = otTables.SingleSubst() + subtable.mapping = dups + synthLookup = otTables.Lookup() + synthLookup.LookupFlag = 0 + synthLookup.LookupType = 1 + synthLookup.SubTableCount = 1 + synthLookup.SubTable = [subtable] + if table.table.LookupList is None: + # mtiLib uses None as default value for LookupList, + # while feaLib points to an empty array with count 0 + # TODO: make them do the same + table.table.LookupList = otTables.LookupList() + table.table.LookupList.Lookup = [] + table.table.LookupList.LookupCount = 0 + table.table.LookupList.Lookup.append(synthLookup) + table.table.LookupList.LookupCount += 1 + + if feature.Feature.LookupListIndex[:1] != [synthLookup]: + feature.Feature.LookupListIndex[:0] = [synthLookup] + 
feature.Feature.LookupCount += 1 + + DefaultTable.merge(self, m, tables) + return self + + +@add_method( + otTables.SingleSubst, + otTables.MultipleSubst, + otTables.AlternateSubst, + otTables.LigatureSubst, + otTables.ReverseChainSingleSubst, + otTables.SinglePos, + otTables.PairPos, + otTables.CursivePos, + otTables.MarkBasePos, + otTables.MarkLigPos, + otTables.MarkMarkPos, +) +def mapLookups(self, lookupMap): + pass + + +# Copied and trimmed down from subset.py +@add_method( + otTables.ContextSubst, + otTables.ChainContextSubst, + otTables.ContextPos, + otTables.ChainContextPos, +) +def __merge_classify_context(self): + class ContextHelper(object): + def __init__(self, klass, Format): + if klass.__name__.endswith("Subst"): + Typ = "Sub" + Type = "Subst" + else: + Typ = "Pos" + Type = "Pos" + if klass.__name__.startswith("Chain"): + Chain = "Chain" + else: + Chain = "" + ChainTyp = Chain + Typ + + self.Typ = Typ + self.Type = Type + self.Chain = Chain + self.ChainTyp = ChainTyp + + self.LookupRecord = Type + "LookupRecord" + + if Format == 1: + self.Rule = ChainTyp + "Rule" + self.RuleSet = ChainTyp + "RuleSet" + elif Format == 2: + self.Rule = ChainTyp + "ClassRule" + self.RuleSet = ChainTyp + "ClassSet" + + if self.Format not in [1, 2, 3]: + return None # Don't shoot the messenger; let it go + if not hasattr(self.__class__, "_merge__ContextHelpers"): + self.__class__._merge__ContextHelpers = {} + if self.Format not in self.__class__._merge__ContextHelpers: + helper = ContextHelper(self.__class__, self.Format) + self.__class__._merge__ContextHelpers[self.Format] = helper + return self.__class__._merge__ContextHelpers[self.Format] + + +@add_method( + otTables.ContextSubst, + otTables.ChainContextSubst, + otTables.ContextPos, + otTables.ChainContextPos, +) +def mapLookups(self, lookupMap): + c = self.__merge_classify_context() + + if self.Format in [1, 2]: + for rs in getattr(self, c.RuleSet): + if not rs: + continue + for r in getattr(rs, c.Rule): + if not r: + 
continue + for ll in getattr(r, c.LookupRecord): + if not ll: + continue + ll.LookupListIndex = lookupMap[ll.LookupListIndex] + elif self.Format == 3: + for ll in getattr(self, c.LookupRecord): + if not ll: + continue + ll.LookupListIndex = lookupMap[ll.LookupListIndex] + else: + assert 0, "unknown format: %s" % self.Format + + +@add_method(otTables.ExtensionSubst, otTables.ExtensionPos) +def mapLookups(self, lookupMap): + if self.Format == 1: + self.ExtSubTable.mapLookups(lookupMap) + else: + assert 0, "unknown format: %s" % self.Format + + +@add_method(otTables.Lookup) +def mapLookups(self, lookupMap): + for st in self.SubTable: + if not st: + continue + st.mapLookups(lookupMap) + + +@add_method(otTables.LookupList) +def mapLookups(self, lookupMap): + for l in self.Lookup: + if not l: + continue + l.mapLookups(lookupMap) + + +@add_method(otTables.Lookup) +def mapMarkFilteringSets(self, markFilteringSetMap): + if self.LookupFlag & 0x0010: + self.MarkFilteringSet = markFilteringSetMap[self.MarkFilteringSet] + + +@add_method(otTables.LookupList) +def mapMarkFilteringSets(self, markFilteringSetMap): + for l in self.Lookup: + if not l: + continue + l.mapMarkFilteringSets(markFilteringSetMap) + + +@add_method(otTables.Feature) +def mapLookups(self, lookupMap): + self.LookupListIndex = [lookupMap[i] for i in self.LookupListIndex] + + +@add_method(otTables.FeatureList) +def mapLookups(self, lookupMap): + for f in self.FeatureRecord: + if not f or not f.Feature: + continue + f.Feature.mapLookups(lookupMap) + + +@add_method(otTables.DefaultLangSys, otTables.LangSys) +def mapFeatures(self, featureMap): + self.FeatureIndex = [featureMap[i] for i in self.FeatureIndex] + if self.ReqFeatureIndex != 65535: + self.ReqFeatureIndex = featureMap[self.ReqFeatureIndex] + + +@add_method(otTables.Script) +def mapFeatures(self, featureMap): + if self.DefaultLangSys: + self.DefaultLangSys.mapFeatures(featureMap) + for l in self.LangSysRecord: + if not l or not l.LangSys: + continue + 
l.LangSys.mapFeatures(featureMap) + + +@add_method(otTables.ScriptList) +def mapFeatures(self, featureMap): + for s in self.ScriptRecord: + if not s or not s.Script: + continue + s.Script.mapFeatures(featureMap) + + +def layoutPreMerge(font): + # Map indices to references + + GDEF = font.get("GDEF") + GSUB = font.get("GSUB") + GPOS = font.get("GPOS") + + for t in [GSUB, GPOS]: + if not t: + continue + + if t.table.LookupList: + lookupMap = {i: v for i, v in enumerate(t.table.LookupList.Lookup)} + t.table.LookupList.mapLookups(lookupMap) + t.table.FeatureList.mapLookups(lookupMap) + + if ( + GDEF + and GDEF.table.Version >= 0x00010002 + and GDEF.table.MarkGlyphSetsDef + ): + markFilteringSetMap = { + i: v for i, v in enumerate(GDEF.table.MarkGlyphSetsDef.Coverage) + } + t.table.LookupList.mapMarkFilteringSets(markFilteringSetMap) + + if t.table.FeatureList and t.table.ScriptList: + featureMap = {i: v for i, v in enumerate(t.table.FeatureList.FeatureRecord)} + t.table.ScriptList.mapFeatures(featureMap) + + # TODO FeatureParams nameIDs + + +def layoutPostMerge(font): + # Map references back to indices + + GDEF = font.get("GDEF") + GSUB = font.get("GSUB") + GPOS = font.get("GPOS") + + for t in [GSUB, GPOS]: + if not t: + continue + + if t.table.FeatureList and t.table.ScriptList: + # Collect unregistered (new) features. + featureMap = GregariousIdentityDict(t.table.FeatureList.FeatureRecord) + t.table.ScriptList.mapFeatures(featureMap) + + # Record used features. + featureMap = AttendanceRecordingIdentityDict( + t.table.FeatureList.FeatureRecord + ) + t.table.ScriptList.mapFeatures(featureMap) + usedIndices = featureMap.s + + # Remove unused features + t.table.FeatureList.FeatureRecord = [ + f + for i, f in enumerate(t.table.FeatureList.FeatureRecord) + if i in usedIndices + ] + + # Map back to indices. 
+ featureMap = NonhashableDict(t.table.FeatureList.FeatureRecord) + t.table.ScriptList.mapFeatures(featureMap) + + t.table.FeatureList.FeatureCount = len(t.table.FeatureList.FeatureRecord) + + if t.table.LookupList: + # Collect unregistered (new) lookups. + lookupMap = GregariousIdentityDict(t.table.LookupList.Lookup) + t.table.FeatureList.mapLookups(lookupMap) + t.table.LookupList.mapLookups(lookupMap) + + # Record used lookups. + lookupMap = AttendanceRecordingIdentityDict(t.table.LookupList.Lookup) + t.table.FeatureList.mapLookups(lookupMap) + t.table.LookupList.mapLookups(lookupMap) + usedIndices = lookupMap.s + + # Remove unused lookups + t.table.LookupList.Lookup = [ + l for i, l in enumerate(t.table.LookupList.Lookup) if i in usedIndices + ] + + # Map back to indices. + lookupMap = NonhashableDict(t.table.LookupList.Lookup) + t.table.FeatureList.mapLookups(lookupMap) + t.table.LookupList.mapLookups(lookupMap) + + t.table.LookupList.LookupCount = len(t.table.LookupList.Lookup) + + if GDEF and GDEF.table.Version >= 0x00010002: + markFilteringSetMap = NonhashableDict( + GDEF.table.MarkGlyphSetsDef.Coverage + ) + t.table.LookupList.mapMarkFilteringSets(markFilteringSetMap) + + # TODO FeatureParams nameIDs diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/options.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/options.py new file mode 100644 index 0000000000000000000000000000000000000000..8bc8947138a80aa51278def2ebaa46523407a4e7 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/options.py @@ -0,0 +1,85 @@ +# Copyright 2013 Google, Inc. All Rights Reserved. 
+# +# Google Author(s): Behdad Esfahbod, Roozbeh Pournader + + +class Options(object): + class UnknownOptionError(Exception): + pass + + def __init__(self, **kwargs): + self.verbose = False + self.timing = False + self.drop_tables = [] + self.input_file = None + self.output_file = "merged.ttf" + self.import_file = None + + self.set(**kwargs) + + def set(self, **kwargs): + for k, v in kwargs.items(): + if not hasattr(self, k): + raise self.UnknownOptionError("Unknown option '%s'" % k) + setattr(self, k, v) + + def parse_opts(self, argv, ignore_unknown=[]): + ret = [] + opts = {} + for a in argv: + orig_a = a + if not a.startswith("--"): + ret.append(a) + continue + a = a[2:] + i = a.find("=") + op = "=" + if i == -1: + if a.startswith("no-"): + k = a[3:] + v = False + else: + k = a + v = True + else: + k = a[:i] + if k[-1] in "-+": + op = k[-1] + "=" # Ops is '-=' or '+=' now. + k = k[:-1] + v = a[i + 1 :] + ok = k + k = k.replace("-", "_") + if not hasattr(self, k): + if ignore_unknown is True or ok in ignore_unknown: + ret.append(orig_a) + continue + else: + raise self.UnknownOptionError("Unknown option '%s'" % a) + + ov = getattr(self, k) + if isinstance(ov, bool): + v = bool(v) + elif isinstance(ov, int): + v = int(v) + elif isinstance(ov, list): + vv = v.split(",") + if vv == [""]: + vv = [] + vv = [int(x, 0) if len(x) and x[0] in "0123456789" else x for x in vv] + if op == "=": + v = vv + elif op == "+=": + v = ov + v.extend(vv) + elif op == "-=": + v = ov + for x in vv: + if x in v: + v.remove(x) + else: + assert 0 + + opts[k] = v + self.set(**opts) + + return ret diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/arrayTools.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/arrayTools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4d949c9369157aa23db69090e7a8d11540ce48b9 Binary files /dev/null and 
b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/arrayTools.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/cliTools.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/cliTools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..51b0b39b58640035b4e7684ba4ef0bf25b890f68 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/cliTools.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/eexec.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/eexec.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..451f041bcea3c9d53d3f1a4f38e13f6549860b08 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/eexec.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/filenames.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/filenames.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a7c9b0f4a18ccb94b7742a99ee2d7e9a6bf9ecc4 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/filenames.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/iterTools.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/iterTools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..94fd1ef8402f9301f802a95cdad58c3192329501 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/iterTools.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/loggingTools.cpython-310.pyc 
b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/loggingTools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7fc85c4344990f0d818f99f39947467429d7bee0 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/loggingTools.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/macRes.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/macRes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..17d5265568613165e9c3c5fa75519625a43d4a15 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/macRes.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/psLib.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/psLib.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fb35581e93520c75edbb037a7c86846f2e00ca06 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/psLib.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/psOperators.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/psOperators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe77dfd4e481cc6593bbc3fc01db2fe1fd47e5be Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/psOperators.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/roundTools.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/roundTools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..476752d33cb56d719e49ed2741c0c4c7e4beb36b 
Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/roundTools.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/textTools.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/textTools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cf9162202ffc1c01f94a85d31a5de7738b192c26 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/textTools.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/timeTools.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/timeTools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..77436fb51fe6fed182d5ad6b6e02e348f1c81598 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/timeTools.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/transform.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/transform.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bc288ef993844fd16f6b3f3e111cd20e6e54e135 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/transform.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/treeTools.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/treeTools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6011a35030e1eb5b94676c97cbb88cdc3aa26807 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/treeTools.cpython-310.pyc differ diff --git 
a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/vector.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/vector.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6ae07972e6a1d6e0bf6cc52f436da44ddee89b5 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/vector.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/visitor.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/visitor.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d3da7a6f37f66ac2fdb7825a4ef38dc24de293e5 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/visitor.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/arrayTools.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/arrayTools.py new file mode 100644 index 0000000000000000000000000000000000000000..ced8d87a613c1b43d3d6c6c822e053aae92a08cd --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/arrayTools.py @@ -0,0 +1,424 @@ +"""Routines for calculating bounding boxes, point in rectangle calculations and +so on. +""" + +from fontTools.misc.roundTools import otRound +from fontTools.misc.vector import Vector as _Vector +import math +import warnings + + +def calcBounds(array): + """Calculate the bounding rectangle of a 2D points array. + + Args: + array: A sequence of 2D tuples. + + Returns: + A four-item tuple representing the bounding rectangle ``(xMin, yMin, xMax, yMax)``. + """ + if not array: + return 0, 0, 0, 0 + xs = [x for x, y in array] + ys = [y for x, y in array] + return min(xs), min(ys), max(xs), max(ys) + + +def calcIntBounds(array, round=otRound): + """Calculate the integer bounding rectangle of a 2D points array. 
+ + Values are rounded to closest integer towards ``+Infinity`` using the + :func:`fontTools.misc.fixedTools.otRound` function by default, unless + an optional ``round`` function is passed. + + Args: + array: A sequence of 2D tuples. + round: A rounding function of type ``f(x: float) -> int``. + + Returns: + A four-item tuple of integers representing the bounding rectangle: + ``(xMin, yMin, xMax, yMax)``. + """ + return tuple(round(v) for v in calcBounds(array)) + + +def updateBounds(bounds, p, min=min, max=max): + """Add a point to a bounding rectangle. + + Args: + bounds: A bounding rectangle expressed as a tuple + ``(xMin, yMin, xMax, yMax), or None``. + p: A 2D tuple representing a point. + min,max: functions to compute the minimum and maximum. + + Returns: + The updated bounding rectangle ``(xMin, yMin, xMax, yMax)``. + """ + (x, y) = p + if bounds is None: + return x, y, x, y + xMin, yMin, xMax, yMax = bounds + return min(xMin, x), min(yMin, y), max(xMax, x), max(yMax, y) + + +def pointInRect(p, rect): + """Test if a point is inside a bounding rectangle. + + Args: + p: A 2D tuple representing a point. + rect: A bounding rectangle expressed as a tuple + ``(xMin, yMin, xMax, yMax)``. + + Returns: + ``True`` if the point is inside the rectangle, ``False`` otherwise. + """ + (x, y) = p + xMin, yMin, xMax, yMax = rect + return (xMin <= x <= xMax) and (yMin <= y <= yMax) + + +def pointsInRect(array, rect): + """Determine which points are inside a bounding rectangle. + + Args: + array: A sequence of 2D tuples. + rect: A bounding rectangle expressed as a tuple + ``(xMin, yMin, xMax, yMax)``. + + Returns: + A list containing the points inside the rectangle. + """ + if len(array) < 1: + return [] + xMin, yMin, xMax, yMax = rect + return [(xMin <= x <= xMax) and (yMin <= y <= yMax) for x, y in array] + + +def vectorLength(vector): + """Calculate the length of the given vector. + + Args: + vector: A 2D tuple. + + Returns: + The Euclidean length of the vector. 
+ """ + x, y = vector + return math.sqrt(x**2 + y**2) + + +def asInt16(array): + """Round a list of floats to 16-bit signed integers. + + Args: + array: List of float values. + + Returns: + A list of rounded integers. + """ + return [int(math.floor(i + 0.5)) for i in array] + + +def normRect(rect): + """Normalize a bounding box rectangle. + + This function "turns the rectangle the right way up", so that the following + holds:: + + xMin <= xMax and yMin <= yMax + + Args: + rect: A bounding rectangle expressed as a tuple + ``(xMin, yMin, xMax, yMax)``. + + Returns: + A normalized bounding rectangle. + """ + (xMin, yMin, xMax, yMax) = rect + return min(xMin, xMax), min(yMin, yMax), max(xMin, xMax), max(yMin, yMax) + + +def scaleRect(rect, x, y): + """Scale a bounding box rectangle. + + Args: + rect: A bounding rectangle expressed as a tuple + ``(xMin, yMin, xMax, yMax)``. + x: Factor to scale the rectangle along the X axis. + Y: Factor to scale the rectangle along the Y axis. + + Returns: + A scaled bounding rectangle. + """ + (xMin, yMin, xMax, yMax) = rect + return xMin * x, yMin * y, xMax * x, yMax * y + + +def offsetRect(rect, dx, dy): + """Offset a bounding box rectangle. + + Args: + rect: A bounding rectangle expressed as a tuple + ``(xMin, yMin, xMax, yMax)``. + dx: Amount to offset the rectangle along the X axis. + dY: Amount to offset the rectangle along the Y axis. + + Returns: + An offset bounding rectangle. + """ + (xMin, yMin, xMax, yMax) = rect + return xMin + dx, yMin + dy, xMax + dx, yMax + dy + + +def insetRect(rect, dx, dy): + """Inset a bounding box rectangle on all sides. + + Args: + rect: A bounding rectangle expressed as a tuple + ``(xMin, yMin, xMax, yMax)``. + dx: Amount to inset the rectangle along the X axis. + dY: Amount to inset the rectangle along the Y axis. + + Returns: + An inset bounding rectangle. 
+ """ + (xMin, yMin, xMax, yMax) = rect + return xMin + dx, yMin + dy, xMax - dx, yMax - dy + + +def sectRect(rect1, rect2): + """Test for rectangle-rectangle intersection. + + Args: + rect1: First bounding rectangle, expressed as tuples + ``(xMin, yMin, xMax, yMax)``. + rect2: Second bounding rectangle. + + Returns: + A boolean and a rectangle. + If the input rectangles intersect, returns ``True`` and the intersecting + rectangle. Returns ``False`` and ``(0, 0, 0, 0)`` if the input + rectangles don't intersect. + """ + (xMin1, yMin1, xMax1, yMax1) = rect1 + (xMin2, yMin2, xMax2, yMax2) = rect2 + xMin, yMin, xMax, yMax = ( + max(xMin1, xMin2), + max(yMin1, yMin2), + min(xMax1, xMax2), + min(yMax1, yMax2), + ) + if xMin >= xMax or yMin >= yMax: + return False, (0, 0, 0, 0) + return True, (xMin, yMin, xMax, yMax) + + +def unionRect(rect1, rect2): + """Determine union of bounding rectangles. + + Args: + rect1: First bounding rectangle, expressed as tuples + ``(xMin, yMin, xMax, yMax)``. + rect2: Second bounding rectangle. + + Returns: + The smallest rectangle in which both input rectangles are fully + enclosed. + """ + (xMin1, yMin1, xMax1, yMax1) = rect1 + (xMin2, yMin2, xMax2, yMax2) = rect2 + xMin, yMin, xMax, yMax = ( + min(xMin1, xMin2), + min(yMin1, yMin2), + max(xMax1, xMax2), + max(yMax1, yMax2), + ) + return (xMin, yMin, xMax, yMax) + + +def rectCenter(rect): + """Determine rectangle center. + + Args: + rect: Bounding rectangle, expressed as tuples + ``(xMin, yMin, xMax, yMax)``. + + Returns: + A 2D tuple representing the point at the center of the rectangle. + """ + (xMin, yMin, xMax, yMax) = rect + return (xMin + xMax) / 2, (yMin + yMax) / 2 + + +def rectArea(rect): + """Determine rectangle area. + + Args: + rect: Bounding rectangle, expressed as tuples + ``(xMin, yMin, xMax, yMax)``. + + Returns: + The area of the rectangle. 
+ """ + (xMin, yMin, xMax, yMax) = rect + return (yMax - yMin) * (xMax - xMin) + + +def intRect(rect): + """Round a rectangle to integer values. + + Guarantees that the resulting rectangle is NOT smaller than the original. + + Args: + rect: Bounding rectangle, expressed as tuples + ``(xMin, yMin, xMax, yMax)``. + + Returns: + A rounded bounding rectangle. + """ + (xMin, yMin, xMax, yMax) = rect + xMin = int(math.floor(xMin)) + yMin = int(math.floor(yMin)) + xMax = int(math.ceil(xMax)) + yMax = int(math.ceil(yMax)) + return (xMin, yMin, xMax, yMax) + + +def quantizeRect(rect, factor=1): + """ + >>> bounds = (72.3, -218.4, 1201.3, 919.1) + >>> quantizeRect(bounds) + (72, -219, 1202, 920) + >>> quantizeRect(bounds, factor=10) + (70, -220, 1210, 920) + >>> quantizeRect(bounds, factor=100) + (0, -300, 1300, 1000) + """ + if factor < 1: + raise ValueError(f"Expected quantization factor >= 1, found: {factor!r}") + xMin, yMin, xMax, yMax = normRect(rect) + return ( + int(math.floor(xMin / factor) * factor), + int(math.floor(yMin / factor) * factor), + int(math.ceil(xMax / factor) * factor), + int(math.ceil(yMax / factor) * factor), + ) + + +class Vector(_Vector): + def __init__(self, *args, **kwargs): + warnings.warn( + "fontTools.misc.arrayTools.Vector has been deprecated, please use " + "fontTools.misc.vector.Vector instead.", + DeprecationWarning, + ) + + +def pairwise(iterable, reverse=False): + """Iterate over current and next items in iterable. + + Args: + iterable: An iterable + reverse: If true, iterate in reverse order. + + Returns: + A iterable yielding two elements per iteration. 
+ + Example: + + >>> tuple(pairwise([])) + () + >>> tuple(pairwise([], reverse=True)) + () + >>> tuple(pairwise([0])) + ((0, 0),) + >>> tuple(pairwise([0], reverse=True)) + ((0, 0),) + >>> tuple(pairwise([0, 1])) + ((0, 1), (1, 0)) + >>> tuple(pairwise([0, 1], reverse=True)) + ((1, 0), (0, 1)) + >>> tuple(pairwise([0, 1, 2])) + ((0, 1), (1, 2), (2, 0)) + >>> tuple(pairwise([0, 1, 2], reverse=True)) + ((2, 1), (1, 0), (0, 2)) + >>> tuple(pairwise(['a', 'b', 'c', 'd'])) + (('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'a')) + >>> tuple(pairwise(['a', 'b', 'c', 'd'], reverse=True)) + (('d', 'c'), ('c', 'b'), ('b', 'a'), ('a', 'd')) + """ + if not iterable: + return + if reverse: + it = reversed(iterable) + else: + it = iter(iterable) + first = next(it, None) + a = first + for b in it: + yield (a, b) + a = b + yield (a, first) + + +def _test(): + """ + >>> import math + >>> calcBounds([]) + (0, 0, 0, 0) + >>> calcBounds([(0, 40), (0, 100), (50, 50), (80, 10)]) + (0, 10, 80, 100) + >>> updateBounds((0, 0, 0, 0), (100, 100)) + (0, 0, 100, 100) + >>> pointInRect((50, 50), (0, 0, 100, 100)) + True + >>> pointInRect((0, 0), (0, 0, 100, 100)) + True + >>> pointInRect((100, 100), (0, 0, 100, 100)) + True + >>> not pointInRect((101, 100), (0, 0, 100, 100)) + True + >>> list(pointsInRect([(50, 50), (0, 0), (100, 100), (101, 100)], (0, 0, 100, 100))) + [True, True, True, False] + >>> vectorLength((3, 4)) + 5.0 + >>> vectorLength((1, 1)) == math.sqrt(2) + True + >>> list(asInt16([0, 0.1, 0.5, 0.9])) + [0, 0, 1, 1] + >>> normRect((0, 10, 100, 200)) + (0, 10, 100, 200) + >>> normRect((100, 200, 0, 10)) + (0, 10, 100, 200) + >>> scaleRect((10, 20, 50, 150), 1.5, 2) + (15.0, 40, 75.0, 300) + >>> offsetRect((10, 20, 30, 40), 5, 6) + (15, 26, 35, 46) + >>> insetRect((10, 20, 50, 60), 5, 10) + (15, 30, 45, 50) + >>> insetRect((10, 20, 50, 60), -5, -10) + (5, 10, 55, 70) + >>> intersects, rect = sectRect((0, 10, 20, 30), (0, 40, 20, 50)) + >>> not intersects + True + >>> intersects, rect = 
sectRect((0, 10, 20, 30), (5, 20, 35, 50)) + >>> intersects + 1 + >>> rect + (5, 20, 20, 30) + >>> unionRect((0, 10, 20, 30), (0, 40, 20, 50)) + (0, 10, 20, 50) + >>> rectCenter((0, 0, 100, 200)) + (50.0, 100.0) + >>> rectCenter((0, 0, 100, 199.0)) + (50.0, 99.5) + >>> intRect((0.9, 2.9, 3.1, 4.1)) + (0, 2, 4, 5) + """ + + +if __name__ == "__main__": + import sys + import doctest + + sys.exit(doctest.testmod().failed) diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/cliTools.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/cliTools.py new file mode 100644 index 0000000000000000000000000000000000000000..8a64235bf075ebd52b58044a15d475058bd1fdd2 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/cliTools.py @@ -0,0 +1,53 @@ +"""Collection of utilities for command-line interfaces and console scripts.""" + +import os +import re + + +numberAddedRE = re.compile(r"#\d+$") + + +def makeOutputFileName( + input, outputDir=None, extension=None, overWrite=False, suffix="" +): + """Generates a suitable file name for writing output. + + Often tools will want to take a file, do some kind of transformation to it, + and write it out again. This function determines an appropriate name for the + output file, through one or more of the following steps: + + - changing the output directory + - appending suffix before file extension + - replacing the file extension + - suffixing the filename with a number (``#1``, ``#2``, etc.) to avoid + overwriting an existing file. + + Args: + input: Name of input file. + outputDir: Optionally, a new directory to write the file into. + suffix: Optionally, a string suffix is appended to file name before + the extension. + extension: Optionally, a replacement for the current file extension. + overWrite: Overwriting an existing file is permitted if true; if false + and the proposed filename exists, a new name will be generated by + adding an appropriate number suffix. 
+ + Returns: + str: Suitable output filename + """ + dirName, fileName = os.path.split(input) + fileName, ext = os.path.splitext(fileName) + if outputDir: + dirName = outputDir + fileName = numberAddedRE.split(fileName)[0] + if extension is None: + extension = os.path.splitext(input)[1] + output = os.path.join(dirName, fileName + suffix + extension) + n = 1 + if not overWrite: + while os.path.exists(output): + output = os.path.join( + dirName, fileName + suffix + "#" + repr(n) + extension + ) + n += 1 + return output diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/cython.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/cython.py new file mode 100644 index 0000000000000000000000000000000000000000..2a42d94a3591e0e8e47f184b303e4aec0a6337ef --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/cython.py @@ -0,0 +1,27 @@ +""" Exports a no-op 'cython' namespace similar to +https://github.com/cython/cython/blob/master/Cython/Shadow.py + +This allows to optionally compile @cython decorated functions +(when cython is available at built time), or run the same code +as pure-python, without runtime dependency on cython module. + +We only define the symbols that we use. E.g. 
see fontTools.cu2qu +""" + +from types import SimpleNamespace + + +def _empty_decorator(x): + return x + + +compiled = False + +for name in ("double", "complex", "int"): + globals()[name] = None + +for name in ("cfunc", "inline"): + globals()[name] = _empty_decorator + +locals = lambda **_: _empty_decorator +returns = lambda _: _empty_decorator diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/dictTools.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/dictTools.py new file mode 100644 index 0000000000000000000000000000000000000000..cd3d394c25bc0f5ab49b502fdb614d01a9fef281 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/dictTools.py @@ -0,0 +1,83 @@ +"""Misc dict tools.""" + +__all__ = ["hashdict"] + + +# https://stackoverflow.com/questions/1151658/python-hashable-dicts +class hashdict(dict): + """ + hashable dict implementation, suitable for use as a key into + other dicts. + + >>> h1 = hashdict({"apples": 1, "bananas":2}) + >>> h2 = hashdict({"bananas": 3, "mangoes": 5}) + >>> h1+h2 + hashdict(apples=1, bananas=3, mangoes=5) + >>> d1 = {} + >>> d1[h1] = "salad" + >>> d1[h1] + 'salad' + >>> d1[h2] + Traceback (most recent call last): + ... 
+ KeyError: hashdict(bananas=3, mangoes=5) + + based on answers from + http://stackoverflow.com/questions/1151658/python-hashable-dicts + + """ + + def __key(self): + return tuple(sorted(self.items())) + + def __repr__(self): + return "{0}({1})".format( + self.__class__.__name__, + ", ".join("{0}={1}".format(str(i[0]), repr(i[1])) for i in self.__key()), + ) + + def __hash__(self): + return hash(self.__key()) + + def __setitem__(self, key, value): + raise TypeError( + "{0} does not support item assignment".format(self.__class__.__name__) + ) + + def __delitem__(self, key): + raise TypeError( + "{0} does not support item assignment".format(self.__class__.__name__) + ) + + def clear(self): + raise TypeError( + "{0} does not support item assignment".format(self.__class__.__name__) + ) + + def pop(self, *args, **kwargs): + raise TypeError( + "{0} does not support item assignment".format(self.__class__.__name__) + ) + + def popitem(self, *args, **kwargs): + raise TypeError( + "{0} does not support item assignment".format(self.__class__.__name__) + ) + + def setdefault(self, *args, **kwargs): + raise TypeError( + "{0} does not support item assignment".format(self.__class__.__name__) + ) + + def update(self, *args, **kwargs): + raise TypeError( + "{0} does not support item assignment".format(self.__class__.__name__) + ) + + # update is not ok because it mutates the object + # __add__ is ok because it creates a new object + # while the new object is under construction, it's ok to mutate it + def __add__(self, right): + result = hashdict(self) + dict.update(result, right) + return result diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/fixedTools.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/fixedTools.py new file mode 100644 index 0000000000000000000000000000000000000000..330042871c521231f2a396add543dd425783722b --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/fixedTools.py @@ -0,0 +1,253 @@ +""" +The 
`OpenType specification `_ +defines two fixed-point data types: + +``Fixed`` + A 32-bit signed fixed-point number with a 16 bit twos-complement + magnitude component and 16 fractional bits. +``F2DOT14`` + A 16-bit signed fixed-point number with a 2 bit twos-complement + magnitude component and 14 fractional bits. + +To support reading and writing data with these data types, this module provides +functions for converting between fixed-point, float and string representations. + +.. data:: MAX_F2DOT14 + + The maximum value that can still fit in an F2Dot14. (1.99993896484375) +""" + +from .roundTools import otRound, nearestMultipleShortestRepr +import logging + +log = logging.getLogger(__name__) + +__all__ = [ + "MAX_F2DOT14", + "fixedToFloat", + "floatToFixed", + "floatToFixedToFloat", + "floatToFixedToStr", + "fixedToStr", + "strToFixed", + "strToFixedToFloat", + "ensureVersionIsLong", + "versionToFixed", +] + + +MAX_F2DOT14 = 0x7FFF / (1 << 14) + + +def fixedToFloat(value, precisionBits): + """Converts a fixed-point number to a float given the number of + precision bits. + + Args: + value (int): Number in fixed-point format. + precisionBits (int): Number of precision bits. + + Returns: + Floating point value. + + Examples:: + + >>> import math + >>> f = fixedToFloat(-10139, precisionBits=14) + >>> math.isclose(f, -0.61883544921875) + True + """ + return value / (1 << precisionBits) + + +def floatToFixed(value, precisionBits): + """Converts a float to a fixed-point number given the number of + precision bits. + + Args: + value (float): Floating point value. + precisionBits (int): Number of precision bits. + + Returns: + int: Fixed-point representation. + + Examples:: + + >>> floatToFixed(-0.61883544921875, precisionBits=14) + -10139 + >>> floatToFixed(-0.61884, precisionBits=14) + -10139 + """ + return otRound(value * (1 << precisionBits)) + + +def floatToFixedToFloat(value, precisionBits): + """Converts a float to a fixed-point number and back again. 
+ + By converting the float to fixed, rounding it, and converting it back + to float again, this returns a floating point values which is exactly + representable in fixed-point format. + + Note: this **is** equivalent to ``fixedToFloat(floatToFixed(value))``. + + Args: + value (float): The input floating point value. + precisionBits (int): Number of precision bits. + + Returns: + float: The transformed and rounded value. + + Examples:: + >>> import math + >>> f1 = -0.61884 + >>> f2 = floatToFixedToFloat(-0.61884, precisionBits=14) + >>> f1 != f2 + True + >>> math.isclose(f2, -0.61883544921875) + True + """ + scale = 1 << precisionBits + return otRound(value * scale) / scale + + +def fixedToStr(value, precisionBits): + """Converts a fixed-point number to a string representing a decimal float. + + This chooses the float that has the shortest decimal representation (the least + number of fractional decimal digits). + + For example, to convert a fixed-point number in a 2.14 format, use + ``precisionBits=14``:: + + >>> fixedToStr(-10139, precisionBits=14) + '-0.61884' + + This is pretty slow compared to the simple division used in ``fixedToFloat``. + Use sporadically when you need to serialize or print the fixed-point number in + a human-readable form. + It uses nearestMultipleShortestRepr under the hood. + + Args: + value (int): The fixed-point value to convert. + precisionBits (int): Number of precision bits, *up to a maximum of 16*. + + Returns: + str: A string representation of the value. + """ + scale = 1 << precisionBits + return nearestMultipleShortestRepr(value / scale, factor=1.0 / scale) + + +def strToFixed(string, precisionBits): + """Converts a string representing a decimal float to a fixed-point number. + + Args: + string (str): A string representing a decimal float. + precisionBits (int): Number of precision bits, *up to a maximum of 16*. + + Returns: + int: Fixed-point representation. 
+ + Examples:: + + >>> ## to convert a float string to a 2.14 fixed-point number: + >>> strToFixed('-0.61884', precisionBits=14) + -10139 + """ + value = float(string) + return otRound(value * (1 << precisionBits)) + + +def strToFixedToFloat(string, precisionBits): + """Convert a string to a decimal float with fixed-point rounding. + + This first converts string to a float, then turns it into a fixed-point + number with ``precisionBits`` fractional binary digits, then back to a + float again. + + This is simply a shorthand for fixedToFloat(floatToFixed(float(s))). + + Args: + string (str): A string representing a decimal float. + precisionBits (int): Number of precision bits. + + Returns: + float: The transformed and rounded value. + + Examples:: + + >>> import math + >>> s = '-0.61884' + >>> bits = 14 + >>> f = strToFixedToFloat(s, precisionBits=bits) + >>> math.isclose(f, -0.61883544921875) + True + >>> f == fixedToFloat(floatToFixed(float(s), precisionBits=bits), precisionBits=bits) + True + """ + value = float(string) + scale = 1 << precisionBits + return otRound(value * scale) / scale + + +def floatToFixedToStr(value, precisionBits): + """Convert float to string with fixed-point rounding. + + This uses the shortest decimal representation (ie. the least + number of fractional decimal digits) to represent the equivalent + fixed-point number with ``precisionBits`` fractional binary digits. + It uses nearestMultipleShortestRepr under the hood. + + >>> floatToFixedToStr(-0.61883544921875, precisionBits=14) + '-0.61884' + + Args: + value (float): The float value to convert. + precisionBits (int): Number of precision bits, *up to a maximum of 16*. + + Returns: + str: A string representation of the value. + + """ + scale = 1 << precisionBits + return nearestMultipleShortestRepr(value, factor=1.0 / scale) + + +def ensureVersionIsLong(value): + """Ensure a table version is an unsigned long. 
+ + OpenType table version numbers are expressed as a single unsigned long + comprising of an unsigned short major version and unsigned short minor + version. This function detects if the value to be used as a version number + looks too small (i.e. is less than ``0x10000``), and converts it to + fixed-point using :func:`floatToFixed` if so. + + Args: + value (Number): a candidate table version number. + + Returns: + int: A table version number, possibly corrected to fixed-point. + """ + if value < 0x10000: + newValue = floatToFixed(value, 16) + log.warning( + "Table version value is a float: %.4f; " "fix to use hex instead: 0x%08x", + value, + newValue, + ) + value = newValue + return value + + +def versionToFixed(value): + """Ensure a table version number is fixed-point. + + Args: + value (str): a candidate table version number. + + Returns: + int: A table version number, possibly corrected to fixed-point. + """ + value = int(value, 0) if value.startswith("0") else float(value) + value = ensureVersionIsLong(value) + return value diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/iterTools.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/iterTools.py new file mode 100644 index 0000000000000000000000000000000000000000..d7b8305322c8d7d9ef847bd626b3e4077e04e6f7 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/iterTools.py @@ -0,0 +1,12 @@ +from itertools import * + +# Python 3.12: +if "batched" not in globals(): + # https://docs.python.org/3/library/itertools.html#itertools.batched + def batched(iterable, n): + # batched('ABCDEFG', 3) --> ABC DEF G + if n < 1: + raise ValueError("n must be at least one") + it = iter(iterable) + while batch := tuple(islice(it, n)): + yield batch diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/loggingTools.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/loggingTools.py new file mode 100644 index 
0000000000000000000000000000000000000000..78704f5a9aa4811db98aa3132ed3f12ee0853ee2 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/loggingTools.py @@ -0,0 +1,543 @@ +import sys +import logging +import timeit +from functools import wraps +from collections.abc import Mapping, Callable +import warnings +from logging import PercentStyle + + +# default logging level used by Timer class +TIME_LEVEL = logging.DEBUG + +# per-level format strings used by the default formatter +# (the level name is not printed for INFO and DEBUG messages) +DEFAULT_FORMATS = { + "*": "%(levelname)s: %(message)s", + "INFO": "%(message)s", + "DEBUG": "%(message)s", +} + + +class LevelFormatter(logging.Formatter): + """Log formatter with level-specific formatting. + + Formatter class which optionally takes a dict of logging levels to + format strings, allowing to customise the log records appearance for + specific levels. + + + Attributes: + fmt: A dictionary mapping logging levels to format strings. + The ``*`` key identifies the default format string. + datefmt: As per py:class:`logging.Formatter` + style: As per py:class:`logging.Formatter` + + >>> import sys + >>> handler = logging.StreamHandler(sys.stdout) + >>> formatter = LevelFormatter( + ... fmt={ + ... '*': '[%(levelname)s] %(message)s', + ... 'DEBUG': '%(name)s [%(levelname)s] %(message)s', + ... 'INFO': '%(message)s', + ... 
}) + >>> handler.setFormatter(formatter) + >>> log = logging.getLogger('test') + >>> log.setLevel(logging.DEBUG) + >>> log.addHandler(handler) + >>> log.debug('this uses a custom format string') + test [DEBUG] this uses a custom format string + >>> log.info('this also uses a custom format string') + this also uses a custom format string + >>> log.warning("this one uses the default format string") + [WARNING] this one uses the default format string + """ + + def __init__(self, fmt=None, datefmt=None, style="%"): + if style != "%": + raise ValueError( + "only '%' percent style is supported in both python 2 and 3" + ) + if fmt is None: + fmt = DEFAULT_FORMATS + if isinstance(fmt, str): + default_format = fmt + custom_formats = {} + elif isinstance(fmt, Mapping): + custom_formats = dict(fmt) + default_format = custom_formats.pop("*", None) + else: + raise TypeError("fmt must be a str or a dict of str: %r" % fmt) + super(LevelFormatter, self).__init__(default_format, datefmt) + self.default_format = self._fmt + self.custom_formats = {} + for level, fmt in custom_formats.items(): + level = logging._checkLevel(level) + self.custom_formats[level] = fmt + + def format(self, record): + if self.custom_formats: + fmt = self.custom_formats.get(record.levelno, self.default_format) + if self._fmt != fmt: + self._fmt = fmt + # for python >= 3.2, _style needs to be set if _fmt changes + if PercentStyle: + self._style = PercentStyle(fmt) + return super(LevelFormatter, self).format(record) + + +def configLogger(**kwargs): + """A more sophisticated logging system configuation manager. + + This is more or less the same as :py:func:`logging.basicConfig`, + with some additional options and defaults. + + The default behaviour is to create a ``StreamHandler`` which writes to + sys.stderr, set a formatter using the ``DEFAULT_FORMATS`` strings, and add + the handler to the top-level library logger ("fontTools"). 
+ + A number of optional keyword arguments may be specified, which can alter + the default behaviour. + + Args: + + logger: Specifies the logger name or a Logger instance to be + configured. (Defaults to "fontTools" logger). Unlike ``basicConfig``, + this function can be called multiple times to reconfigure a logger. + If the logger or any of its children already exists before the call is + made, they will be reset before the new configuration is applied. + filename: Specifies that a ``FileHandler`` be created, using the + specified filename, rather than a ``StreamHandler``. + filemode: Specifies the mode to open the file, if filename is + specified. (If filemode is unspecified, it defaults to ``a``). + format: Use the specified format string for the handler. This + argument also accepts a dictionary of format strings keyed by + level name, to allow customising the records appearance for + specific levels. The special ``'*'`` key is for 'any other' level. + datefmt: Use the specified date/time format. + level: Set the logger level to the specified level. + stream: Use the specified stream to initialize the StreamHandler. Note + that this argument is incompatible with ``filename`` - if both + are present, ``stream`` is ignored. + handlers: If specified, this should be an iterable of already created + handlers, which will be added to the logger. Any handler in the + list which does not have a formatter assigned will be assigned the + formatter created in this function. + filters: If specified, this should be an iterable of already created + filters. If the ``handlers`` do not already have filters assigned, + these filters will be added to them. + propagate: All loggers have a ``propagate`` attribute which determines + whether to continue searching for handlers up the logging hierarchy. + If not provided, the "propagate" attribute will be set to ``False``. + """ + # using kwargs to enforce keyword-only arguments in py2. 
+ handlers = kwargs.pop("handlers", None) + if handlers is None: + if "stream" in kwargs and "filename" in kwargs: + raise ValueError( + "'stream' and 'filename' should not be " "specified together" + ) + else: + if "stream" in kwargs or "filename" in kwargs: + raise ValueError( + "'stream' or 'filename' should not be " + "specified together with 'handlers'" + ) + if handlers is None: + filename = kwargs.pop("filename", None) + mode = kwargs.pop("filemode", "a") + if filename: + h = logging.FileHandler(filename, mode) + else: + stream = kwargs.pop("stream", None) + h = logging.StreamHandler(stream) + handlers = [h] + # By default, the top-level library logger is configured. + logger = kwargs.pop("logger", "fontTools") + if not logger or isinstance(logger, str): + # empty "" or None means the 'root' logger + logger = logging.getLogger(logger) + # before (re)configuring, reset named logger and its children (if exist) + _resetExistingLoggers(parent=logger.name) + # use DEFAULT_FORMATS if 'format' is None + fs = kwargs.pop("format", None) + dfs = kwargs.pop("datefmt", None) + # XXX: '%' is the only format style supported on both py2 and 3 + style = kwargs.pop("style", "%") + fmt = LevelFormatter(fs, dfs, style) + filters = kwargs.pop("filters", []) + for h in handlers: + if h.formatter is None: + h.setFormatter(fmt) + if not h.filters: + for f in filters: + h.addFilter(f) + logger.addHandler(h) + if logger.name != "root": + # stop searching up the hierarchy for handlers + logger.propagate = kwargs.pop("propagate", False) + # set a custom severity level + level = kwargs.pop("level", None) + if level is not None: + logger.setLevel(level) + if kwargs: + keys = ", ".join(kwargs.keys()) + raise ValueError("Unrecognised argument(s): %s" % keys) + + +def _resetExistingLoggers(parent="root"): + """Reset the logger named 'parent' and all its children to their initial + state, if they already exist in the current configuration. 
+ """ + root = logging.root + # get sorted list of all existing loggers + existing = sorted(root.manager.loggerDict.keys()) + if parent == "root": + # all the existing loggers are children of 'root' + loggers_to_reset = [parent] + existing + elif parent not in existing: + # nothing to do + return + elif parent in existing: + loggers_to_reset = [parent] + # collect children, starting with the entry after parent name + i = existing.index(parent) + 1 + prefixed = parent + "." + pflen = len(prefixed) + num_existing = len(existing) + while i < num_existing: + if existing[i][:pflen] == prefixed: + loggers_to_reset.append(existing[i]) + i += 1 + for name in loggers_to_reset: + if name == "root": + root.setLevel(logging.WARNING) + for h in root.handlers[:]: + root.removeHandler(h) + for f in root.filters[:]: + root.removeFilters(f) + root.disabled = False + else: + logger = root.manager.loggerDict[name] + logger.level = logging.NOTSET + logger.handlers = [] + logger.filters = [] + logger.propagate = True + logger.disabled = False + + +class Timer(object): + """Keeps track of overall time and split/lap times. + + >>> import time + >>> timer = Timer() + >>> time.sleep(0.01) + >>> print("First lap:", timer.split()) + First lap: ... + >>> time.sleep(0.02) + >>> print("Second lap:", timer.split()) + Second lap: ... + >>> print("Overall time:", timer.time()) + Overall time: ... + + Can be used as a context manager inside with-statements. + + >>> with Timer() as t: + ... time.sleep(0.01) + >>> print("%0.3f seconds" % t.elapsed) + 0... seconds + + If initialised with a logger, it can log the elapsed time automatically + upon exiting the with-statement. + + >>> import logging + >>> log = logging.getLogger("my-fancy-timer-logger") + >>> configLogger(logger=log, level="DEBUG", format="%(message)s", stream=sys.stdout) + >>> with Timer(log, 'do something'): + ... time.sleep(0.01) + Took ... 
to do something + + The same Timer instance, holding a reference to a logger, can be reused + in multiple with-statements, optionally with different messages or levels. + + >>> timer = Timer(log) + >>> with timer(): + ... time.sleep(0.01) + elapsed time: ...s + >>> with timer('redo it', level=logging.INFO): + ... time.sleep(0.02) + Took ... to redo it + + It can also be used as a function decorator to log the time elapsed to run + the decorated function. + + >>> @timer() + ... def test1(): + ... time.sleep(0.01) + >>> @timer('run test 2', level=logging.INFO) + ... def test2(): + ... time.sleep(0.02) + >>> test1() + Took ... to run 'test1' + >>> test2() + Took ... to run test 2 + """ + + # timeit.default_timer choses the most accurate clock for each platform + _time = timeit.default_timer + default_msg = "elapsed time: %(time).3fs" + default_format = "Took %(time).3fs to %(msg)s" + + def __init__(self, logger=None, msg=None, level=None, start=None): + self.reset(start) + if logger is None: + for arg in ("msg", "level"): + if locals().get(arg) is not None: + raise ValueError("'%s' can't be specified without a 'logger'" % arg) + self.logger = logger + self.level = level if level is not None else TIME_LEVEL + self.msg = msg + + def reset(self, start=None): + """Reset timer to 'start_time' or the current time.""" + if start is None: + self.start = self._time() + else: + self.start = start + self.last = self.start + self.elapsed = 0.0 + + def time(self): + """Return the overall time (in seconds) since the timer started.""" + return self._time() - self.start + + def split(self): + """Split and return the lap time (in seconds) in between splits.""" + current = self._time() + self.elapsed = current - self.last + self.last = current + return self.elapsed + + def formatTime(self, msg, time): + """Format 'time' value in 'msg' and return formatted string. + If 'msg' contains a '%(time)' format string, try to use that. + Otherwise, use the predefined 'default_format'. 
+ If 'msg' is empty or None, fall back to 'default_msg'. + """ + if not msg: + msg = self.default_msg + if msg.find("%(time)") < 0: + msg = self.default_format % {"msg": msg, "time": time} + else: + try: + msg = msg % {"time": time} + except (KeyError, ValueError): + pass # skip if the format string is malformed + return msg + + def __enter__(self): + """Start a new lap""" + self.last = self._time() + self.elapsed = 0.0 + return self + + def __exit__(self, exc_type, exc_value, traceback): + """End the current lap. If timer has a logger, log the time elapsed, + using the format string in self.msg (or the default one). + """ + time = self.split() + if self.logger is None or exc_type: + # if there's no logger attached, or if any exception occurred in + # the with-statement, exit without logging the time + return + message = self.formatTime(self.msg, time) + # Allow log handlers to see the individual parts to facilitate things + # like a server accumulating aggregate stats. + msg_parts = {"msg": self.msg, "time": time} + self.logger.log(self.level, message, msg_parts) + + def __call__(self, func_or_msg=None, **kwargs): + """If the first argument is a function, return a decorator which runs + the wrapped function inside Timer's context manager. + Otherwise, treat the first argument as a 'msg' string and return an updated + Timer instance, referencing the same logger. + A 'level' keyword can also be passed to override self.level. 
+ """ + if isinstance(func_or_msg, Callable): + func = func_or_msg + # use the function name when no explicit 'msg' is provided + if not self.msg: + self.msg = "run '%s'" % func.__name__ + + @wraps(func) + def wrapper(*args, **kwds): + with self: + return func(*args, **kwds) + + return wrapper + else: + msg = func_or_msg or kwargs.get("msg") + level = kwargs.get("level", self.level) + return self.__class__(self.logger, msg, level) + + def __float__(self): + return self.elapsed + + def __int__(self): + return int(self.elapsed) + + def __str__(self): + return "%.3f" % self.elapsed + + +class ChannelsFilter(logging.Filter): + """Provides a hierarchical filter for log entries based on channel names. + + Filters out records emitted from a list of enabled channel names, + including their children. It works the same as the ``logging.Filter`` + class, but allows the user to specify multiple channel names. + + >>> import sys + >>> handler = logging.StreamHandler(sys.stdout) + >>> handler.setFormatter(logging.Formatter("%(message)s")) + >>> filter = ChannelsFilter("A.B", "C.D") + >>> handler.addFilter(filter) + >>> root = logging.getLogger() + >>> root.addHandler(handler) + >>> root.setLevel(level=logging.DEBUG) + >>> logging.getLogger('A.B').debug('this record passes through') + this record passes through + >>> logging.getLogger('A.B.C').debug('records from children also pass') + records from children also pass + >>> logging.getLogger('C.D').debug('this one as well') + this one as well + >>> logging.getLogger('A.B.').debug('also this one') + also this one + >>> logging.getLogger('A.F').debug('but this one does not!') + >>> logging.getLogger('C.DE').debug('neither this one!') + """ + + def __init__(self, *names): + self.names = names + self.num = len(names) + self.lengths = {n: len(n) for n in names} + + def filter(self, record): + if self.num == 0: + return True + for name in self.names: + nlen = self.lengths[name] + if name == record.name: + return True + elif 
record.name.find(name, 0, nlen) == 0 and record.name[nlen] == ".": + return True + return False + + +class CapturingLogHandler(logging.Handler): + def __init__(self, logger, level): + super(CapturingLogHandler, self).__init__(level=level) + self.records = [] + if isinstance(logger, str): + self.logger = logging.getLogger(logger) + else: + self.logger = logger + + def __enter__(self): + self.original_disabled = self.logger.disabled + self.original_level = self.logger.level + self.original_propagate = self.logger.propagate + + self.logger.addHandler(self) + self.logger.setLevel(self.level) + self.logger.disabled = False + self.logger.propagate = False + + return self + + def __exit__(self, type, value, traceback): + self.logger.removeHandler(self) + self.logger.setLevel(self.original_level) + self.logger.disabled = self.original_disabled + self.logger.propagate = self.original_propagate + + return self + + def emit(self, record): + self.records.append(record) + + def assertRegex(self, regexp, msg=None): + import re + + pattern = re.compile(regexp) + for r in self.records: + if pattern.search(r.getMessage()): + return True + if msg is None: + msg = "Pattern '%s' not found in logger records" % regexp + assert 0, msg + + +class LogMixin(object): + """Mixin class that adds logging functionality to another class. + + You can define a new class that subclasses from ``LogMixin`` as well as + other base classes through multiple inheritance. + All instances of that class will have a ``log`` property that returns + a ``logging.Logger`` named after their respective ``.``. + + For example: + + >>> class BaseClass(object): + ... pass + >>> class MyClass(LogMixin, BaseClass): + ... pass + >>> a = MyClass() + >>> isinstance(a.log, logging.Logger) + True + >>> print(a.log.name) + fontTools.misc.loggingTools.MyClass + >>> class AnotherClass(MyClass): + ... 
pass + >>> b = AnotherClass() + >>> isinstance(b.log, logging.Logger) + True + >>> print(b.log.name) + fontTools.misc.loggingTools.AnotherClass + """ + + @property + def log(self): + if not hasattr(self, "_log"): + name = ".".join((self.__class__.__module__, self.__class__.__name__)) + self._log = logging.getLogger(name) + return self._log + + +def deprecateArgument(name, msg, category=UserWarning): + """Raise a warning about deprecated function argument 'name'.""" + warnings.warn("%r is deprecated; %s" % (name, msg), category=category, stacklevel=3) + + +def deprecateFunction(msg, category=UserWarning): + """Decorator to raise a warning when a deprecated function is called.""" + + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + warnings.warn( + "%r is deprecated; %s" % (func.__name__, msg), + category=category, + stacklevel=2, + ) + return func(*args, **kwargs) + + return wrapper + + return decorator + + +if __name__ == "__main__": + import doctest + + sys.exit(doctest.testmod(optionflags=doctest.ELLIPSIS).failed) diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/macRes.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/macRes.py new file mode 100644 index 0000000000000000000000000000000000000000..f5a6cfe4789a351204d0ce6fa2abb5651487c5c0 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/macRes.py @@ -0,0 +1,261 @@ +from io import BytesIO +import struct +from fontTools.misc import sstruct +from fontTools.misc.textTools import bytesjoin, tostr +from collections import OrderedDict +from collections.abc import MutableMapping + + +class ResourceError(Exception): + pass + + +class ResourceReader(MutableMapping): + """Reader for Mac OS resource forks. + + Parses a resource fork and returns resources according to their type. + If run on OS X, this will open the resource fork in the filesystem. 
+ Otherwise, it will open the file itself and attempt to read it as + though it were a resource fork. + + The returned object can be indexed by type and iterated over, + returning in each case a list of py:class:`Resource` objects + representing all the resources of a certain type. + + """ + + def __init__(self, fileOrPath): + """Open a file + + Args: + fileOrPath: Either an object supporting a ``read`` method, an + ``os.PathLike`` object, or a string. + """ + self._resources = OrderedDict() + if hasattr(fileOrPath, "read"): + self.file = fileOrPath + else: + try: + # try reading from the resource fork (only works on OS X) + self.file = self.openResourceFork(fileOrPath) + self._readFile() + return + except (ResourceError, IOError): + # if it fails, use the data fork + self.file = self.openDataFork(fileOrPath) + self._readFile() + + @staticmethod + def openResourceFork(path): + if hasattr(path, "__fspath__"): # support os.PathLike objects + path = path.__fspath__() + with open(path + "/..namedfork/rsrc", "rb") as resfork: + data = resfork.read() + infile = BytesIO(data) + infile.name = path + return infile + + @staticmethod + def openDataFork(path): + with open(path, "rb") as datafork: + data = datafork.read() + infile = BytesIO(data) + infile.name = path + return infile + + def _readFile(self): + self._readHeaderAndMap() + self._readTypeList() + + def _read(self, numBytes, offset=None): + if offset is not None: + try: + self.file.seek(offset) + except OverflowError: + raise ResourceError("Failed to seek offset ('offset' is too large)") + if self.file.tell() != offset: + raise ResourceError("Failed to seek offset (reached EOF)") + try: + data = self.file.read(numBytes) + except OverflowError: + raise ResourceError("Cannot read resource ('numBytes' is too large)") + if len(data) != numBytes: + raise ResourceError("Cannot read resource (not enough data)") + return data + + def _readHeaderAndMap(self): + self.file.seek(0) + headerData = 
self._read(ResourceForkHeaderSize) + sstruct.unpack(ResourceForkHeader, headerData, self) + # seek to resource map, skip reserved + mapOffset = self.mapOffset + 22 + resourceMapData = self._read(ResourceMapHeaderSize, mapOffset) + sstruct.unpack(ResourceMapHeader, resourceMapData, self) + self.absTypeListOffset = self.mapOffset + self.typeListOffset + self.absNameListOffset = self.mapOffset + self.nameListOffset + + def _readTypeList(self): + absTypeListOffset = self.absTypeListOffset + numTypesData = self._read(2, absTypeListOffset) + (self.numTypes,) = struct.unpack(">H", numTypesData) + absTypeListOffset2 = absTypeListOffset + 2 + for i in range(self.numTypes + 1): + resTypeItemOffset = absTypeListOffset2 + ResourceTypeItemSize * i + resTypeItemData = self._read(ResourceTypeItemSize, resTypeItemOffset) + item = sstruct.unpack(ResourceTypeItem, resTypeItemData) + resType = tostr(item["type"], encoding="mac-roman") + refListOffset = absTypeListOffset + item["refListOffset"] + numRes = item["numRes"] + 1 + resources = self._readReferenceList(resType, refListOffset, numRes) + self._resources[resType] = resources + + def _readReferenceList(self, resType, refListOffset, numRes): + resources = [] + for i in range(numRes): + refOffset = refListOffset + ResourceRefItemSize * i + refData = self._read(ResourceRefItemSize, refOffset) + res = Resource(resType) + res.decompile(refData, self) + resources.append(res) + return resources + + def __getitem__(self, resType): + return self._resources[resType] + + def __delitem__(self, resType): + del self._resources[resType] + + def __setitem__(self, resType, resources): + self._resources[resType] = resources + + def __len__(self): + return len(self._resources) + + def __iter__(self): + return iter(self._resources) + + def keys(self): + return self._resources.keys() + + @property + def types(self): + """A list of the types of resources in the resource fork.""" + return list(self._resources.keys()) + + def countResources(self, 
resType): + """Return the number of resources of a given type.""" + try: + return len(self[resType]) + except KeyError: + return 0 + + def getIndices(self, resType): + """Returns a list of indices of resources of a given type.""" + numRes = self.countResources(resType) + if numRes: + return list(range(1, numRes + 1)) + else: + return [] + + def getNames(self, resType): + """Return list of names of all resources of a given type.""" + return [res.name for res in self.get(resType, []) if res.name is not None] + + def getIndResource(self, resType, index): + """Return resource of given type located at an index ranging from 1 + to the number of resources for that type, or None if not found. + """ + if index < 1: + return None + try: + res = self[resType][index - 1] + except (KeyError, IndexError): + return None + return res + + def getNamedResource(self, resType, name): + """Return the named resource of given type, else return None.""" + name = tostr(name, encoding="mac-roman") + for res in self.get(resType, []): + if res.name == name: + return res + return None + + def close(self): + if not self.file.closed: + self.file.close() + + +class Resource(object): + """Represents a resource stored within a resource fork. + + Attributes: + type: resource type. + data: resource data. + id: ID. + name: resource name. + attr: attributes. 
+ """ + + def __init__( + self, resType=None, resData=None, resID=None, resName=None, resAttr=None + ): + self.type = resType + self.data = resData + self.id = resID + self.name = resName + self.attr = resAttr + + def decompile(self, refData, reader): + sstruct.unpack(ResourceRefItem, refData, self) + # interpret 3-byte dataOffset as (padded) ULONG to unpack it with struct + (self.dataOffset,) = struct.unpack(">L", bytesjoin([b"\0", self.dataOffset])) + absDataOffset = reader.dataOffset + self.dataOffset + (dataLength,) = struct.unpack(">L", reader._read(4, absDataOffset)) + self.data = reader._read(dataLength) + if self.nameOffset == -1: + return + absNameOffset = reader.absNameListOffset + self.nameOffset + (nameLength,) = struct.unpack("B", reader._read(1, absNameOffset)) + (name,) = struct.unpack(">%ss" % nameLength, reader._read(nameLength)) + self.name = tostr(name, encoding="mac-roman") + + +ResourceForkHeader = """ + > # big endian + dataOffset: L + mapOffset: L + dataLen: L + mapLen: L +""" + +ResourceForkHeaderSize = sstruct.calcsize(ResourceForkHeader) + +ResourceMapHeader = """ + > # big endian + attr: H + typeListOffset: H + nameListOffset: H +""" + +ResourceMapHeaderSize = sstruct.calcsize(ResourceMapHeader) + +ResourceTypeItem = """ + > # big endian + type: 4s + numRes: H + refListOffset: H +""" + +ResourceTypeItemSize = sstruct.calcsize(ResourceTypeItem) + +ResourceRefItem = """ + > # big endian + id: h + nameOffset: h + attr: B + dataOffset: 3s + reserved: L +""" + +ResourceRefItemSize = sstruct.calcsize(ResourceRefItem) diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/plistlib/py.typed b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/plistlib/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/psCharStrings.py 
b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/psCharStrings.py new file mode 100644 index 0000000000000000000000000000000000000000..5d881c5816c58a2283687b67873854bba3af596b --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/psCharStrings.py @@ -0,0 +1,1496 @@ +"""psCharStrings.py -- module implementing various kinds of CharStrings: +CFF dictionary data and Type1/Type2 CharStrings. +""" + +from fontTools.misc.fixedTools import ( + fixedToFloat, + floatToFixed, + floatToFixedToStr, + strToFixedToFloat, +) +from fontTools.misc.textTools import bytechr, byteord, bytesjoin, strjoin +from fontTools.pens.boundsPen import BoundsPen +import struct +import logging + + +log = logging.getLogger(__name__) + + +def read_operator(self, b0, data, index): + if b0 == 12: + op = (b0, byteord(data[index])) + index = index + 1 + else: + op = b0 + try: + operator = self.operators[op] + except KeyError: + return None, index + value = self.handle_operator(operator) + return value, index + + +def read_byte(self, b0, data, index): + return b0 - 139, index + + +def read_smallInt1(self, b0, data, index): + b1 = byteord(data[index]) + return (b0 - 247) * 256 + b1 + 108, index + 1 + + +def read_smallInt2(self, b0, data, index): + b1 = byteord(data[index]) + return -(b0 - 251) * 256 - b1 - 108, index + 1 + + +def read_shortInt(self, b0, data, index): + (value,) = struct.unpack(">h", data[index : index + 2]) + return value, index + 2 + + +def read_longInt(self, b0, data, index): + (value,) = struct.unpack(">l", data[index : index + 4]) + return value, index + 4 + + +def read_fixed1616(self, b0, data, index): + (value,) = struct.unpack(">l", data[index : index + 4]) + return fixedToFloat(value, precisionBits=16), index + 4 + + +def read_reserved(self, b0, data, index): + assert NotImplementedError + return NotImplemented, index + + +def read_realNumber(self, b0, data, index): + number = "" + while True: + b = byteord(data[index]) + index = index + 1 + 
nibble0 = (b & 0xF0) >> 4 + nibble1 = b & 0x0F + if nibble0 == 0xF: + break + number = number + realNibbles[nibble0] + if nibble1 == 0xF: + break + number = number + realNibbles[nibble1] + return float(number), index + + +t1OperandEncoding = [None] * 256 +t1OperandEncoding[0:32] = (32) * [read_operator] +t1OperandEncoding[32:247] = (247 - 32) * [read_byte] +t1OperandEncoding[247:251] = (251 - 247) * [read_smallInt1] +t1OperandEncoding[251:255] = (255 - 251) * [read_smallInt2] +t1OperandEncoding[255] = read_longInt +assert len(t1OperandEncoding) == 256 + +t2OperandEncoding = t1OperandEncoding[:] +t2OperandEncoding[28] = read_shortInt +t2OperandEncoding[255] = read_fixed1616 + +cffDictOperandEncoding = t2OperandEncoding[:] +cffDictOperandEncoding[29] = read_longInt +cffDictOperandEncoding[30] = read_realNumber +cffDictOperandEncoding[255] = read_reserved + + +realNibbles = [ + "0", + "1", + "2", + "3", + "4", + "5", + "6", + "7", + "8", + "9", + ".", + "E", + "E-", + None, + "-", +] +realNibblesDict = {v: i for i, v in enumerate(realNibbles)} + +maxOpStack = 193 + + +def buildOperatorDict(operatorList): + oper = {} + opc = {} + for item in operatorList: + if len(item) == 2: + oper[item[0]] = item[1] + else: + oper[item[0]] = item[1:] + if isinstance(item[0], tuple): + opc[item[1]] = item[0] + else: + opc[item[1]] = (item[0],) + return oper, opc + + +t2Operators = [ + # opcode name + (1, "hstem"), + (3, "vstem"), + (4, "vmoveto"), + (5, "rlineto"), + (6, "hlineto"), + (7, "vlineto"), + (8, "rrcurveto"), + (10, "callsubr"), + (11, "return"), + (14, "endchar"), + (15, "vsindex"), + (16, "blend"), + (18, "hstemhm"), + (19, "hintmask"), + (20, "cntrmask"), + (21, "rmoveto"), + (22, "hmoveto"), + (23, "vstemhm"), + (24, "rcurveline"), + (25, "rlinecurve"), + (26, "vvcurveto"), + (27, "hhcurveto"), + # (28, 'shortint'), # not really an operator + (29, "callgsubr"), + (30, "vhcurveto"), + (31, "hvcurveto"), + ((12, 0), "ignore"), # dotsection. 
Yes, there a few very early OTF/CFF + # fonts with this deprecated operator. Just ignore it. + ((12, 3), "and"), + ((12, 4), "or"), + ((12, 5), "not"), + ((12, 8), "store"), + ((12, 9), "abs"), + ((12, 10), "add"), + ((12, 11), "sub"), + ((12, 12), "div"), + ((12, 13), "load"), + ((12, 14), "neg"), + ((12, 15), "eq"), + ((12, 18), "drop"), + ((12, 20), "put"), + ((12, 21), "get"), + ((12, 22), "ifelse"), + ((12, 23), "random"), + ((12, 24), "mul"), + ((12, 26), "sqrt"), + ((12, 27), "dup"), + ((12, 28), "exch"), + ((12, 29), "index"), + ((12, 30), "roll"), + ((12, 34), "hflex"), + ((12, 35), "flex"), + ((12, 36), "hflex1"), + ((12, 37), "flex1"), +] + + +def getIntEncoder(format): + if format == "cff": + twoByteOp = bytechr(28) + fourByteOp = bytechr(29) + elif format == "t1": + twoByteOp = None + fourByteOp = bytechr(255) + else: + assert format == "t2" + twoByteOp = bytechr(28) + fourByteOp = None + + def encodeInt( + value, + fourByteOp=fourByteOp, + bytechr=bytechr, + pack=struct.pack, + unpack=struct.unpack, + twoByteOp=twoByteOp, + ): + if -107 <= value <= 107: + code = bytechr(value + 139) + elif 108 <= value <= 1131: + value = value - 108 + code = bytechr((value >> 8) + 247) + bytechr(value & 0xFF) + elif -1131 <= value <= -108: + value = -value - 108 + code = bytechr((value >> 8) + 251) + bytechr(value & 0xFF) + elif twoByteOp is not None and -32768 <= value <= 32767: + code = twoByteOp + pack(">h", value) + elif fourByteOp is None: + # Backwards compatible hack: due to a previous bug in FontTools, + # 16.16 fixed numbers were written out as 4-byte ints. When + # these numbers were small, they were wrongly written back as + # small ints instead of 4-byte ints, breaking round-tripping. + # This here workaround doesn't do it any better, since we can't + # distinguish anymore between small ints that were supposed to + # be small fixed numbers and small ints that were just small + # ints. Hence the warning. 
+ log.warning( + "4-byte T2 number got passed to the " + "IntType handler. This should happen only when reading in " + "old XML files.\n" + ) + code = bytechr(255) + pack(">l", value) + else: + code = fourByteOp + pack(">l", value) + return code + + return encodeInt + + +encodeIntCFF = getIntEncoder("cff") +encodeIntT1 = getIntEncoder("t1") +encodeIntT2 = getIntEncoder("t2") + + +def encodeFixed(f, pack=struct.pack): + """For T2 only""" + value = floatToFixed(f, precisionBits=16) + if value & 0xFFFF == 0: # check if the fractional part is zero + return encodeIntT2(value >> 16) # encode only the integer part + else: + return b"\xff" + pack(">l", value) # encode the entire fixed point value + + +realZeroBytes = bytechr(30) + bytechr(0xF) + + +def encodeFloat(f): + # For CFF only, used in cffLib + if f == 0.0: # 0.0 == +0.0 == -0.0 + return realZeroBytes + # Note: 14 decimal digits seems to be the limitation for CFF real numbers + # in macOS. However, we use 8 here to match the implementation of AFDKO. 
+ s = "%.8G" % f + if s[:2] == "0.": + s = s[1:] + elif s[:3] == "-0.": + s = "-" + s[2:] + elif s.endswith("000"): + significantDigits = s.rstrip("0") + s = "%sE%d" % (significantDigits, len(s) - len(significantDigits)) + else: + dotIndex = s.find(".") + eIndex = s.find("E") + if dotIndex != -1 and eIndex != -1: + integerPart = s[:dotIndex] + fractionalPart = s[dotIndex + 1 : eIndex] + exponent = int(s[eIndex + 1 :]) + newExponent = exponent - len(fractionalPart) + if newExponent == 1: + s = "%s%s0" % (integerPart, fractionalPart) + else: + s = "%s%sE%d" % (integerPart, fractionalPart, newExponent) + if s.startswith((".0", "-.0")): + sign, s = s.split(".", 1) + s = "%s%sE-%d" % (sign, s.lstrip("0"), len(s)) + nibbles = [] + while s: + c = s[0] + s = s[1:] + if c == "E": + c2 = s[:1] + if c2 == "-": + s = s[1:] + c = "E-" + elif c2 == "+": + s = s[1:] + if s.startswith("0"): + s = s[1:] + nibbles.append(realNibblesDict[c]) + nibbles.append(0xF) + if len(nibbles) % 2: + nibbles.append(0xF) + d = bytechr(30) + for i in range(0, len(nibbles), 2): + d = d + bytechr(nibbles[i] << 4 | nibbles[i + 1]) + return d + + +class CharStringCompileError(Exception): + pass + + +class SimpleT2Decompiler(object): + def __init__(self, localSubrs, globalSubrs, private=None, blender=None): + self.localSubrs = localSubrs + self.localBias = calcSubrBias(localSubrs) + self.globalSubrs = globalSubrs + self.globalBias = calcSubrBias(globalSubrs) + self.private = private + self.blender = blender + self.reset() + + def reset(self): + self.callingStack = [] + self.operandStack = [] + self.hintCount = 0 + self.hintMaskBytes = 0 + self.numRegions = 0 + self.vsIndex = 0 + + def execute(self, charString): + self.callingStack.append(charString) + needsDecompilation = charString.needsDecompilation() + if needsDecompilation: + program = [] + pushToProgram = program.append + else: + pushToProgram = lambda x: None + pushToStack = self.operandStack.append + index = 0 + while True: + token, isOperator, 
index = charString.getToken(index) + if token is None: + break # we're done! + pushToProgram(token) + if isOperator: + handlerName = "op_" + token + handler = getattr(self, handlerName, None) + if handler is not None: + rv = handler(index) + if rv: + hintMaskBytes, index = rv + pushToProgram(hintMaskBytes) + else: + self.popall() + else: + pushToStack(token) + if needsDecompilation: + charString.setProgram(program) + del self.callingStack[-1] + + def pop(self): + value = self.operandStack[-1] + del self.operandStack[-1] + return value + + def popall(self): + stack = self.operandStack[:] + self.operandStack[:] = [] + return stack + + def push(self, value): + self.operandStack.append(value) + + def op_return(self, index): + if self.operandStack: + pass + + def op_endchar(self, index): + pass + + def op_ignore(self, index): + pass + + def op_callsubr(self, index): + subrIndex = self.pop() + subr = self.localSubrs[subrIndex + self.localBias] + self.execute(subr) + + def op_callgsubr(self, index): + subrIndex = self.pop() + subr = self.globalSubrs[subrIndex + self.globalBias] + self.execute(subr) + + def op_hstem(self, index): + self.countHints() + + def op_vstem(self, index): + self.countHints() + + def op_hstemhm(self, index): + self.countHints() + + def op_vstemhm(self, index): + self.countHints() + + def op_hintmask(self, index): + if not self.hintMaskBytes: + self.countHints() + self.hintMaskBytes = (self.hintCount + 7) // 8 + hintMaskBytes, index = self.callingStack[-1].getBytes(index, self.hintMaskBytes) + return hintMaskBytes, index + + op_cntrmask = op_hintmask + + def countHints(self): + args = self.popall() + self.hintCount = self.hintCount + len(args) // 2 + + # misc + def op_and(self, index): + raise NotImplementedError + + def op_or(self, index): + raise NotImplementedError + + def op_not(self, index): + raise NotImplementedError + + def op_store(self, index): + raise NotImplementedError + + def op_abs(self, index): + raise NotImplementedError + + def 
op_add(self, index): + raise NotImplementedError + + def op_sub(self, index): + raise NotImplementedError + + def op_div(self, index): + raise NotImplementedError + + def op_load(self, index): + raise NotImplementedError + + def op_neg(self, index): + raise NotImplementedError + + def op_eq(self, index): + raise NotImplementedError + + def op_drop(self, index): + raise NotImplementedError + + def op_put(self, index): + raise NotImplementedError + + def op_get(self, index): + raise NotImplementedError + + def op_ifelse(self, index): + raise NotImplementedError + + def op_random(self, index): + raise NotImplementedError + + def op_mul(self, index): + raise NotImplementedError + + def op_sqrt(self, index): + raise NotImplementedError + + def op_dup(self, index): + raise NotImplementedError + + def op_exch(self, index): + raise NotImplementedError + + def op_index(self, index): + raise NotImplementedError + + def op_roll(self, index): + raise NotImplementedError + + def op_blend(self, index): + if self.numRegions == 0: + self.numRegions = self.private.getNumRegions() + numBlends = self.pop() + numOps = numBlends * (self.numRegions + 1) + if self.blender is None: + del self.operandStack[ + -(numOps - numBlends) : + ] # Leave the default operands on the stack. 
+ else: + argi = len(self.operandStack) - numOps + end_args = tuplei = argi + numBlends + while argi < end_args: + next_ti = tuplei + self.numRegions + deltas = self.operandStack[tuplei:next_ti] + delta = self.blender(self.vsIndex, deltas) + self.operandStack[argi] += delta + tuplei = next_ti + argi += 1 + self.operandStack[end_args:] = [] + + def op_vsindex(self, index): + vi = self.pop() + self.vsIndex = vi + self.numRegions = self.private.getNumRegions(vi) + + +t1Operators = [ + # opcode name + (1, "hstem"), + (3, "vstem"), + (4, "vmoveto"), + (5, "rlineto"), + (6, "hlineto"), + (7, "vlineto"), + (8, "rrcurveto"), + (9, "closepath"), + (10, "callsubr"), + (11, "return"), + (13, "hsbw"), + (14, "endchar"), + (21, "rmoveto"), + (22, "hmoveto"), + (30, "vhcurveto"), + (31, "hvcurveto"), + ((12, 0), "dotsection"), + ((12, 1), "vstem3"), + ((12, 2), "hstem3"), + ((12, 6), "seac"), + ((12, 7), "sbw"), + ((12, 12), "div"), + ((12, 16), "callothersubr"), + ((12, 17), "pop"), + ((12, 33), "setcurrentpoint"), +] + + +class T2WidthExtractor(SimpleT2Decompiler): + def __init__( + self, + localSubrs, + globalSubrs, + nominalWidthX, + defaultWidthX, + private=None, + blender=None, + ): + SimpleT2Decompiler.__init__(self, localSubrs, globalSubrs, private, blender) + self.nominalWidthX = nominalWidthX + self.defaultWidthX = defaultWidthX + + def reset(self): + SimpleT2Decompiler.reset(self) + self.gotWidth = 0 + self.width = 0 + + def popallWidth(self, evenOdd=0): + args = self.popall() + if not self.gotWidth: + if evenOdd ^ (len(args) % 2): + # For CFF2 charstrings, this should never happen + assert ( + self.defaultWidthX is not None + ), "CFF2 CharStrings must not have an initial width value" + self.width = self.nominalWidthX + args[0] + args = args[1:] + else: + self.width = self.defaultWidthX + self.gotWidth = 1 + return args + + def countHints(self): + args = self.popallWidth() + self.hintCount = self.hintCount + len(args) // 2 + + def op_rmoveto(self, index): + 
self.popallWidth() + + def op_hmoveto(self, index): + self.popallWidth(1) + + def op_vmoveto(self, index): + self.popallWidth(1) + + def op_endchar(self, index): + self.popallWidth() + + +class T2OutlineExtractor(T2WidthExtractor): + def __init__( + self, + pen, + localSubrs, + globalSubrs, + nominalWidthX, + defaultWidthX, + private=None, + blender=None, + ): + T2WidthExtractor.__init__( + self, + localSubrs, + globalSubrs, + nominalWidthX, + defaultWidthX, + private, + blender, + ) + self.pen = pen + self.subrLevel = 0 + + def reset(self): + T2WidthExtractor.reset(self) + self.currentPoint = (0, 0) + self.sawMoveTo = 0 + self.subrLevel = 0 + + def execute(self, charString): + self.subrLevel += 1 + super().execute(charString) + self.subrLevel -= 1 + if self.subrLevel == 0: + self.endPath() + + def _nextPoint(self, point): + x, y = self.currentPoint + point = x + point[0], y + point[1] + self.currentPoint = point + return point + + def rMoveTo(self, point): + self.pen.moveTo(self._nextPoint(point)) + self.sawMoveTo = 1 + + def rLineTo(self, point): + if not self.sawMoveTo: + self.rMoveTo((0, 0)) + self.pen.lineTo(self._nextPoint(point)) + + def rCurveTo(self, pt1, pt2, pt3): + if not self.sawMoveTo: + self.rMoveTo((0, 0)) + nextPoint = self._nextPoint + self.pen.curveTo(nextPoint(pt1), nextPoint(pt2), nextPoint(pt3)) + + def closePath(self): + if self.sawMoveTo: + self.pen.closePath() + self.sawMoveTo = 0 + + def endPath(self): + # In T2 there are no open paths, so always do a closePath when + # finishing a sub path. 
We avoid spurious calls to closePath() + # because its a real T1 op we're emulating in T2 whereas + # endPath() is just a means to that emulation + if self.sawMoveTo: + self.closePath() + + # + # hint operators + # + # def op_hstem(self, index): + # self.countHints() + # def op_vstem(self, index): + # self.countHints() + # def op_hstemhm(self, index): + # self.countHints() + # def op_vstemhm(self, index): + # self.countHints() + # def op_hintmask(self, index): + # self.countHints() + # def op_cntrmask(self, index): + # self.countHints() + + # + # path constructors, moveto + # + def op_rmoveto(self, index): + self.endPath() + self.rMoveTo(self.popallWidth()) + + def op_hmoveto(self, index): + self.endPath() + self.rMoveTo((self.popallWidth(1)[0], 0)) + + def op_vmoveto(self, index): + self.endPath() + self.rMoveTo((0, self.popallWidth(1)[0])) + + def op_endchar(self, index): + self.endPath() + args = self.popallWidth() + if args: + from fontTools.encodings.StandardEncoding import StandardEncoding + + # endchar can do seac accent bulding; The T2 spec says it's deprecated, + # but recent software that shall remain nameless does output it. 
+ adx, ady, bchar, achar = args + baseGlyph = StandardEncoding[bchar] + self.pen.addComponent(baseGlyph, (1, 0, 0, 1, 0, 0)) + accentGlyph = StandardEncoding[achar] + self.pen.addComponent(accentGlyph, (1, 0, 0, 1, adx, ady)) + + # + # path constructors, lines + # + def op_rlineto(self, index): + args = self.popall() + for i in range(0, len(args), 2): + point = args[i : i + 2] + self.rLineTo(point) + + def op_hlineto(self, index): + self.alternatingLineto(1) + + def op_vlineto(self, index): + self.alternatingLineto(0) + + # + # path constructors, curves + # + def op_rrcurveto(self, index): + """{dxa dya dxb dyb dxc dyc}+ rrcurveto""" + args = self.popall() + for i in range(0, len(args), 6): + ( + dxa, + dya, + dxb, + dyb, + dxc, + dyc, + ) = args[i : i + 6] + self.rCurveTo((dxa, dya), (dxb, dyb), (dxc, dyc)) + + def op_rcurveline(self, index): + """{dxa dya dxb dyb dxc dyc}+ dxd dyd rcurveline""" + args = self.popall() + for i in range(0, len(args) - 2, 6): + dxb, dyb, dxc, dyc, dxd, dyd = args[i : i + 6] + self.rCurveTo((dxb, dyb), (dxc, dyc), (dxd, dyd)) + self.rLineTo(args[-2:]) + + def op_rlinecurve(self, index): + """{dxa dya}+ dxb dyb dxc dyc dxd dyd rlinecurve""" + args = self.popall() + lineArgs = args[:-6] + for i in range(0, len(lineArgs), 2): + self.rLineTo(lineArgs[i : i + 2]) + dxb, dyb, dxc, dyc, dxd, dyd = args[-6:] + self.rCurveTo((dxb, dyb), (dxc, dyc), (dxd, dyd)) + + def op_vvcurveto(self, index): + "dx1? {dya dxb dyb dyc}+ vvcurveto" + args = self.popall() + if len(args) % 2: + dx1 = args[0] + args = args[1:] + else: + dx1 = 0 + for i in range(0, len(args), 4): + dya, dxb, dyb, dyc = args[i : i + 4] + self.rCurveTo((dx1, dya), (dxb, dyb), (0, dyc)) + dx1 = 0 + + def op_hhcurveto(self, index): + """dy1? 
{dxa dxb dyb dxc}+ hhcurveto""" + args = self.popall() + if len(args) % 2: + dy1 = args[0] + args = args[1:] + else: + dy1 = 0 + for i in range(0, len(args), 4): + dxa, dxb, dyb, dxc = args[i : i + 4] + self.rCurveTo((dxa, dy1), (dxb, dyb), (dxc, 0)) + dy1 = 0 + + def op_vhcurveto(self, index): + """dy1 dx2 dy2 dx3 {dxa dxb dyb dyc dyd dxe dye dxf}* dyf? vhcurveto (30) + {dya dxb dyb dxc dxd dxe dye dyf}+ dxf? vhcurveto + """ + args = self.popall() + while args: + args = self.vcurveto(args) + if args: + args = self.hcurveto(args) + + def op_hvcurveto(self, index): + """dx1 dx2 dy2 dy3 {dya dxb dyb dxc dxd dxe dye dyf}* dxf? + {dxa dxb dyb dyc dyd dxe dye dxf}+ dyf? + """ + args = self.popall() + while args: + args = self.hcurveto(args) + if args: + args = self.vcurveto(args) + + # + # path constructors, flex + # + def op_hflex(self, index): + dx1, dx2, dy2, dx3, dx4, dx5, dx6 = self.popall() + dy1 = dy3 = dy4 = dy6 = 0 + dy5 = -dy2 + self.rCurveTo((dx1, dy1), (dx2, dy2), (dx3, dy3)) + self.rCurveTo((dx4, dy4), (dx5, dy5), (dx6, dy6)) + + def op_flex(self, index): + dx1, dy1, dx2, dy2, dx3, dy3, dx4, dy4, dx5, dy5, dx6, dy6, fd = self.popall() + self.rCurveTo((dx1, dy1), (dx2, dy2), (dx3, dy3)) + self.rCurveTo((dx4, dy4), (dx5, dy5), (dx6, dy6)) + + def op_hflex1(self, index): + dx1, dy1, dx2, dy2, dx3, dx4, dx5, dy5, dx6 = self.popall() + dy3 = dy4 = 0 + dy6 = -(dy1 + dy2 + dy3 + dy4 + dy5) + + self.rCurveTo((dx1, dy1), (dx2, dy2), (dx3, dy3)) + self.rCurveTo((dx4, dy4), (dx5, dy5), (dx6, dy6)) + + def op_flex1(self, index): + dx1, dy1, dx2, dy2, dx3, dy3, dx4, dy4, dx5, dy5, d6 = self.popall() + dx = dx1 + dx2 + dx3 + dx4 + dx5 + dy = dy1 + dy2 + dy3 + dy4 + dy5 + if abs(dx) > abs(dy): + dx6 = d6 + dy6 = -dy + else: + dx6 = -dx + dy6 = d6 + self.rCurveTo((dx1, dy1), (dx2, dy2), (dx3, dy3)) + self.rCurveTo((dx4, dy4), (dx5, dy5), (dx6, dy6)) + + # misc + def op_and(self, index): + raise NotImplementedError + + def op_or(self, index): + raise NotImplementedError + + 
def op_not(self, index): + raise NotImplementedError + + def op_store(self, index): + raise NotImplementedError + + def op_abs(self, index): + raise NotImplementedError + + def op_add(self, index): + raise NotImplementedError + + def op_sub(self, index): + raise NotImplementedError + + def op_div(self, index): + num2 = self.pop() + num1 = self.pop() + d1 = num1 // num2 + d2 = num1 / num2 + if d1 == d2: + self.push(d1) + else: + self.push(d2) + + def op_load(self, index): + raise NotImplementedError + + def op_neg(self, index): + raise NotImplementedError + + def op_eq(self, index): + raise NotImplementedError + + def op_drop(self, index): + raise NotImplementedError + + def op_put(self, index): + raise NotImplementedError + + def op_get(self, index): + raise NotImplementedError + + def op_ifelse(self, index): + raise NotImplementedError + + def op_random(self, index): + raise NotImplementedError + + def op_mul(self, index): + raise NotImplementedError + + def op_sqrt(self, index): + raise NotImplementedError + + def op_dup(self, index): + raise NotImplementedError + + def op_exch(self, index): + raise NotImplementedError + + def op_index(self, index): + raise NotImplementedError + + def op_roll(self, index): + raise NotImplementedError + + # + # miscellaneous helpers + # + def alternatingLineto(self, isHorizontal): + args = self.popall() + for arg in args: + if isHorizontal: + point = (arg, 0) + else: + point = (0, arg) + self.rLineTo(point) + isHorizontal = not isHorizontal + + def vcurveto(self, args): + dya, dxb, dyb, dxc = args[:4] + args = args[4:] + if len(args) == 1: + dyc = args[0] + args = [] + else: + dyc = 0 + self.rCurveTo((0, dya), (dxb, dyb), (dxc, dyc)) + return args + + def hcurveto(self, args): + dxa, dxb, dyb, dyc = args[:4] + args = args[4:] + if len(args) == 1: + dxc = args[0] + args = [] + else: + dxc = 0 + self.rCurveTo((dxa, 0), (dxb, dyb), (dxc, dyc)) + return args + + +class T1OutlineExtractor(T2OutlineExtractor): + def __init__(self, pen, 
subrs): + self.pen = pen + self.subrs = subrs + self.reset() + + def reset(self): + self.flexing = 0 + self.width = 0 + self.sbx = 0 + T2OutlineExtractor.reset(self) + + def endPath(self): + if self.sawMoveTo: + self.pen.endPath() + self.sawMoveTo = 0 + + def popallWidth(self, evenOdd=0): + return self.popall() + + def exch(self): + stack = self.operandStack + stack[-1], stack[-2] = stack[-2], stack[-1] + + # + # path constructors + # + def op_rmoveto(self, index): + if self.flexing: + return + self.endPath() + self.rMoveTo(self.popall()) + + def op_hmoveto(self, index): + if self.flexing: + # We must add a parameter to the stack if we are flexing + self.push(0) + return + self.endPath() + self.rMoveTo((self.popall()[0], 0)) + + def op_vmoveto(self, index): + if self.flexing: + # We must add a parameter to the stack if we are flexing + self.push(0) + self.exch() + return + self.endPath() + self.rMoveTo((0, self.popall()[0])) + + def op_closepath(self, index): + self.closePath() + + def op_setcurrentpoint(self, index): + args = self.popall() + x, y = args + self.currentPoint = x, y + + def op_endchar(self, index): + self.endPath() + + def op_hsbw(self, index): + sbx, wx = self.popall() + self.width = wx + self.sbx = sbx + self.currentPoint = sbx, self.currentPoint[1] + + def op_sbw(self, index): + self.popall() # XXX + + # + def op_callsubr(self, index): + subrIndex = self.pop() + subr = self.subrs[subrIndex] + self.execute(subr) + + def op_callothersubr(self, index): + subrIndex = self.pop() + nArgs = self.pop() + # print nArgs, subrIndex, "callothersubr" + if subrIndex == 0 and nArgs == 3: + self.doFlex() + self.flexing = 0 + elif subrIndex == 1 and nArgs == 0: + self.flexing = 1 + # ignore... + + def op_pop(self, index): + pass # ignore... 
+ + def doFlex(self): + finaly = self.pop() + finalx = self.pop() + self.pop() # flex height is unused + + p3y = self.pop() + p3x = self.pop() + bcp4y = self.pop() + bcp4x = self.pop() + bcp3y = self.pop() + bcp3x = self.pop() + p2y = self.pop() + p2x = self.pop() + bcp2y = self.pop() + bcp2x = self.pop() + bcp1y = self.pop() + bcp1x = self.pop() + rpy = self.pop() + rpx = self.pop() + + # call rrcurveto + self.push(bcp1x + rpx) + self.push(bcp1y + rpy) + self.push(bcp2x) + self.push(bcp2y) + self.push(p2x) + self.push(p2y) + self.op_rrcurveto(None) + + # call rrcurveto + self.push(bcp3x) + self.push(bcp3y) + self.push(bcp4x) + self.push(bcp4y) + self.push(p3x) + self.push(p3y) + self.op_rrcurveto(None) + + # Push back final coords so subr 0 can find them + self.push(finalx) + self.push(finaly) + + def op_dotsection(self, index): + self.popall() # XXX + + def op_hstem3(self, index): + self.popall() # XXX + + def op_seac(self, index): + "asb adx ady bchar achar seac" + from fontTools.encodings.StandardEncoding import StandardEncoding + + asb, adx, ady, bchar, achar = self.popall() + baseGlyph = StandardEncoding[bchar] + self.pen.addComponent(baseGlyph, (1, 0, 0, 1, 0, 0)) + accentGlyph = StandardEncoding[achar] + adx = adx + self.sbx - asb # seac weirdness + self.pen.addComponent(accentGlyph, (1, 0, 0, 1, adx, ady)) + + def op_vstem3(self, index): + self.popall() # XXX + + +class T2CharString(object): + operandEncoding = t2OperandEncoding + operators, opcodes = buildOperatorDict(t2Operators) + decompilerClass = SimpleT2Decompiler + outlineExtractor = T2OutlineExtractor + + def __init__(self, bytecode=None, program=None, private=None, globalSubrs=None): + if program is None: + program = [] + self.bytecode = bytecode + self.program = program + self.private = private + self.globalSubrs = globalSubrs if globalSubrs is not None else [] + self._cur_vsindex = None + + def getNumRegions(self, vsindex=None): + pd = self.private + assert pd is not None + if vsindex is not 
None: + self._cur_vsindex = vsindex + elif self._cur_vsindex is None: + self._cur_vsindex = pd.vsindex if hasattr(pd, "vsindex") else 0 + return pd.getNumRegions(self._cur_vsindex) + + def __repr__(self): + if self.bytecode is None: + return "<%s (source) at %x>" % (self.__class__.__name__, id(self)) + else: + return "<%s (bytecode) at %x>" % (self.__class__.__name__, id(self)) + + def getIntEncoder(self): + return encodeIntT2 + + def getFixedEncoder(self): + return encodeFixed + + def decompile(self): + if not self.needsDecompilation(): + return + subrs = getattr(self.private, "Subrs", []) + decompiler = self.decompilerClass(subrs, self.globalSubrs, self.private) + decompiler.execute(self) + + def draw(self, pen, blender=None): + subrs = getattr(self.private, "Subrs", []) + extractor = self.outlineExtractor( + pen, + subrs, + self.globalSubrs, + self.private.nominalWidthX, + self.private.defaultWidthX, + self.private, + blender, + ) + extractor.execute(self) + self.width = extractor.width + + def calcBounds(self, glyphSet): + boundsPen = BoundsPen(glyphSet) + self.draw(boundsPen) + return boundsPen.bounds + + def compile(self, isCFF2=False): + if self.bytecode is not None: + return + opcodes = self.opcodes + program = self.program + + if isCFF2: + # If present, remove return and endchar operators. + if program and program[-1] in ("return", "endchar"): + program = program[:-1] + elif program and not isinstance(program[-1], str): + raise CharStringCompileError( + "T2CharString or Subr has items on the stack after last operator." 
+ ) + + bytecode = [] + encodeInt = self.getIntEncoder() + encodeFixed = self.getFixedEncoder() + i = 0 + end = len(program) + while i < end: + token = program[i] + i = i + 1 + if isinstance(token, str): + try: + bytecode.extend(bytechr(b) for b in opcodes[token]) + except KeyError: + raise CharStringCompileError("illegal operator: %s" % token) + if token in ("hintmask", "cntrmask"): + bytecode.append(program[i]) # hint mask + i = i + 1 + elif isinstance(token, int): + bytecode.append(encodeInt(token)) + elif isinstance(token, float): + bytecode.append(encodeFixed(token)) + else: + assert 0, "unsupported type: %s" % type(token) + try: + bytecode = bytesjoin(bytecode) + except TypeError: + log.error(bytecode) + raise + self.setBytecode(bytecode) + + def needsDecompilation(self): + return self.bytecode is not None + + def setProgram(self, program): + self.program = program + self.bytecode = None + + def setBytecode(self, bytecode): + self.bytecode = bytecode + self.program = None + + def getToken(self, index, len=len, byteord=byteord, isinstance=isinstance): + if self.bytecode is not None: + if index >= len(self.bytecode): + return None, 0, 0 + b0 = byteord(self.bytecode[index]) + index = index + 1 + handler = self.operandEncoding[b0] + token, index = handler(self, b0, self.bytecode, index) + else: + if index >= len(self.program): + return None, 0, 0 + token = self.program[index] + index = index + 1 + isOperator = isinstance(token, str) + return token, isOperator, index + + def getBytes(self, index, nBytes): + if self.bytecode is not None: + newIndex = index + nBytes + bytes = self.bytecode[index:newIndex] + index = newIndex + else: + bytes = self.program[index] + index = index + 1 + assert len(bytes) == nBytes + return bytes, index + + def handle_operator(self, operator): + return operator + + def toXML(self, xmlWriter, ttFont=None): + from fontTools.misc.textTools import num2binary + + if self.bytecode is not None: + xmlWriter.dumphex(self.bytecode) + else: + index 
= 0 + args = [] + while True: + token, isOperator, index = self.getToken(index) + if token is None: + break + if isOperator: + if token in ("hintmask", "cntrmask"): + hintMask, isOperator, index = self.getToken(index) + bits = [] + for byte in hintMask: + bits.append(num2binary(byteord(byte), 8)) + hintMask = strjoin(bits) + line = " ".join(args + [token, hintMask]) + else: + line = " ".join(args + [token]) + xmlWriter.write(line) + xmlWriter.newline() + args = [] + else: + if isinstance(token, float): + token = floatToFixedToStr(token, precisionBits=16) + else: + token = str(token) + args.append(token) + if args: + # NOTE: only CFF2 charstrings/subrs can have numeric arguments on + # the stack after the last operator. Compiling this would fail if + # this is part of CFF 1.0 table. + line = " ".join(args) + xmlWriter.write(line) + + def fromXML(self, name, attrs, content): + from fontTools.misc.textTools import binary2num, readHex + + if attrs.get("raw"): + self.setBytecode(readHex(content)) + return + content = strjoin(content) + content = content.split() + program = [] + end = len(content) + i = 0 + while i < end: + token = content[i] + i = i + 1 + try: + token = int(token) + except ValueError: + try: + token = strToFixedToFloat(token, precisionBits=16) + except ValueError: + program.append(token) + if token in ("hintmask", "cntrmask"): + mask = content[i] + maskBytes = b"" + for j in range(0, len(mask), 8): + maskBytes = maskBytes + bytechr(binary2num(mask[j : j + 8])) + program.append(maskBytes) + i = i + 1 + else: + program.append(token) + else: + program.append(token) + self.setProgram(program) + + +class T1CharString(T2CharString): + operandEncoding = t1OperandEncoding + operators, opcodes = buildOperatorDict(t1Operators) + + def __init__(self, bytecode=None, program=None, subrs=None): + super().__init__(bytecode, program) + self.subrs = subrs + + def getIntEncoder(self): + return encodeIntT1 + + def getFixedEncoder(self): + def encodeFixed(value): + raise 
TypeError("Type 1 charstrings don't support floating point operands") + + def decompile(self): + if self.bytecode is None: + return + program = [] + index = 0 + while True: + token, isOperator, index = self.getToken(index) + if token is None: + break + program.append(token) + self.setProgram(program) + + def draw(self, pen): + extractor = T1OutlineExtractor(pen, self.subrs) + extractor.execute(self) + self.width = extractor.width + + +class DictDecompiler(object): + operandEncoding = cffDictOperandEncoding + + def __init__(self, strings, parent=None): + self.stack = [] + self.strings = strings + self.dict = {} + self.parent = parent + + def getDict(self): + assert len(self.stack) == 0, "non-empty stack" + return self.dict + + def decompile(self, data): + index = 0 + lenData = len(data) + push = self.stack.append + while index < lenData: + b0 = byteord(data[index]) + index = index + 1 + handler = self.operandEncoding[b0] + value, index = handler(self, b0, data, index) + if value is not None: + push(value) + + def pop(self): + value = self.stack[-1] + del self.stack[-1] + return value + + def popall(self): + args = self.stack[:] + del self.stack[:] + return args + + def handle_operator(self, operator): + operator, argType = operator + if isinstance(argType, tuple): + value = () + for i in range(len(argType) - 1, -1, -1): + arg = argType[i] + arghandler = getattr(self, "arg_" + arg) + value = (arghandler(operator),) + value + else: + arghandler = getattr(self, "arg_" + argType) + value = arghandler(operator) + if operator == "blend": + self.stack.extend(value) + else: + self.dict[operator] = value + + def arg_number(self, name): + if isinstance(self.stack[0], list): + out = self.arg_blend_number(self.stack) + else: + out = self.pop() + return out + + def arg_blend_number(self, name): + out = [] + blendArgs = self.pop() + numMasters = len(blendArgs) + out.append(blendArgs) + out.append("blend") + dummy = self.popall() + return blendArgs + + def arg_SID(self, name): + 
return self.strings[self.pop()] + + def arg_array(self, name): + return self.popall() + + def arg_blendList(self, name): + """ + There may be non-blend args at the top of the stack. We first calculate + where the blend args start in the stack. These are the last + numMasters*numBlends) +1 args. + The blend args starts with numMasters relative coordinate values, the BlueValues in the list from the default master font. This is followed by + numBlends list of values. Each of value in one of these lists is the + Variable Font delta for the matching region. + + We re-arrange this to be a list of numMaster entries. Each entry starts with the corresponding default font relative value, and is followed by + the delta values. We then convert the default values, the first item in each entry, to an absolute value. + """ + vsindex = self.dict.get("vsindex", 0) + numMasters = ( + self.parent.getNumRegions(vsindex) + 1 + ) # only a PrivateDict has blended ops. + numBlends = self.pop() + args = self.popall() + numArgs = len(args) + # The spec says that there should be no non-blended Blue Values,. + assert numArgs == numMasters * numBlends + value = [None] * numBlends + numDeltas = numMasters - 1 + i = 0 + prevVal = 0 + while i < numBlends: + newVal = args[i] + prevVal + prevVal = newVal + masterOffset = numBlends + (i * numDeltas) + blendList = [newVal] + args[masterOffset : masterOffset + numDeltas] + value[i] = blendList + i += 1 + return value + + def arg_delta(self, name): + valueList = self.popall() + out = [] + if valueList and isinstance(valueList[0], list): + # arg_blendList() has already converted these to absolute values. 
+ out = valueList + else: + current = 0 + for v in valueList: + current = current + v + out.append(current) + return out + + +def calcSubrBias(subrs): + nSubrs = len(subrs) + if nSubrs < 1240: + bias = 107 + elif nSubrs < 33900: + bias = 1131 + else: + bias = 32768 + return bias diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/psLib.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/psLib.py new file mode 100644 index 0000000000000000000000000000000000000000..3bfdb4ae9fcc0c49b77830d2be8c46274e315bd4 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/psLib.py @@ -0,0 +1,398 @@ +from fontTools.misc.textTools import bytechr, byteord, bytesjoin, tobytes, tostr +from fontTools.misc import eexec +from .psOperators import ( + PSOperators, + ps_StandardEncoding, + ps_array, + ps_boolean, + ps_dict, + ps_integer, + ps_literal, + ps_mark, + ps_name, + ps_operator, + ps_procedure, + ps_procmark, + ps_real, + ps_string, +) +import re +from collections.abc import Callable +from string import whitespace +import logging + + +log = logging.getLogger(__name__) + +ps_special = b"()<>[]{}%" # / is one too, but we take care of that one differently + +skipwhiteRE = re.compile(bytesjoin([b"[", whitespace, b"]*"])) +endofthingPat = bytesjoin([b"[^][(){}<>/%", whitespace, b"]*"]) +endofthingRE = re.compile(endofthingPat) +commentRE = re.compile(b"%[^\n\r]*") + +# XXX This not entirely correct as it doesn't allow *nested* embedded parens: +stringPat = rb""" + \( + ( + ( + [^()]* \ [()] + ) + | + ( + [^()]* \( [^()]* \) + ) + )* + [^()]* + \) +""" +stringPat = b"".join(stringPat.split()) +stringRE = re.compile(stringPat) + +hexstringRE = re.compile(bytesjoin([b"<[", whitespace, b"0-9A-Fa-f]*>"])) + + +class PSTokenError(Exception): + pass + + +class PSError(Exception): + pass + + +class PSTokenizer(object): + def __init__(self, buf=b"", encoding="ascii"): + # Force self.buf to be a byte string + buf = tobytes(buf) + self.buf = buf 
    def read(self, n=-1):
        """Read at most 'n' bytes from the buffer, or less if the read
        hits EOF before obtaining 'n' bytes.
        If 'n' is negative or omitted, read all data until EOF is reached.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")
        if n is None or n < 0:
            newpos = self.len
        else:
            newpos = min(self.pos + n, self.len)
        r = self.buf[self.pos : newpos]
        self.pos = newpos
        return r

    def close(self):
        # Idempotent; drops the buffer so the (possibly large) font data
        # can be garbage-collected.
        if not self.closed:
            self.closed = True
            del self.buf, self.pos

    def getnexttoken(
        self,
        # localize some stuff, for performance
        len=len,
        ps_special=ps_special,
        stringmatch=stringRE.match,
        hexstringmatch=hexstringRE.match,
        commentmatch=commentRE.match,
        endmatch=endofthingRE.match,
    ):
        """Scan the next PostScript token starting at self.pos.

        Returns a (tokentype, token) pair: tokentype is the name of the
        PSInterpreter handler method for delimited tokens ("do_special",
        "do_comment", "do_string", "do_hexstring", "do_literal") or "" for
        plain executable names/numbers; token is the raw token text decoded
        with self.encoding.  Returns (None, None) at end of input.
        """
        self.skipwhite()
        if self.pos >= self.len:
            return None, None
        pos = self.pos
        buf = self.buf
        char = bytechr(byteord(buf[pos]))
        if char in ps_special:
            # Delimiter-introduced tokens: procedure/array brackets,
            # comments, (...) strings, <...> hex strings.
            if char in b"{}[]":
                tokentype = "do_special"
                token = char
            elif char == b"%":
                tokentype = "do_comment"
                _, nextpos = commentmatch(buf, pos).span()
                token = buf[pos:nextpos]
            elif char == b"(":
                tokentype = "do_string"
                m = stringmatch(buf, pos)
                if m is None:
                    raise PSTokenError("bad string at character %d" % pos)
                _, nextpos = m.span()
                token = buf[pos:nextpos]
            elif char == b"<":
                tokentype = "do_hexstring"
                m = hexstringmatch(buf, pos)
                if m is None:
                    raise PSTokenError("bad hexstring at character %d" % pos)
                _, nextpos = m.span()
                token = buf[pos:nextpos]
            else:
                raise PSTokenError("bad token at character %d" % pos)
        else:
            if char == b"/":
                # Literal name: the token proper starts after the slash.
                tokentype = "do_literal"
                m = endmatch(buf, pos + 1)
            else:
                # Executable name or number; classified later by do_token.
                tokentype = ""
                m = endmatch(buf, pos)
            if m is None:
                raise PSTokenError("bad token at character %d" % pos)
            _, nextpos = m.span()
            token = buf[pos:nextpos]
        self.pos = pos + len(token)
        token = tostr(token, encoding=self.encoding)
        return tokentype, token

    def skipwhite(self, whitematch=skipwhiteRE.match):
        # Advance self.pos past a (possibly empty) run of whitespace.
        _, nextpos = whitematch(self.buf, self.pos).span()
        self.pos = nextpos

    def starteexec(self):
        # Switch to the eexec-encrypted portion of a Type 1 font: decrypt
        # the remainder of the buffer (key 55665) and skip the first 4
        # decrypted bytes (eexec salt bytes, per the Type 1 spec).
        self.pos = self.pos + 1
        self.dirtybuf = self.buf[self.pos :]
        self.buf, R = eexec.decrypt(self.dirtybuf, 55665)
        self.len = len(self.buf)
        self.pos = 4

    def stopeexec(self):
        # Restore the original (still-encrypted) buffer saved by
        # starteexec(); no-op if eexec mode was never entered.
        if not hasattr(self, "dirtybuf"):
            return
        self.buf = self.dirtybuf
        del self.dirtybuf
    def handle_object(self, object):
        """Execute or push one scanned/constructed PostScript object.

        While a procedure is being defined (proclevel > 0), and for
        literals and procedure bodies, objects are simply pushed; otherwise
        executable names are resolved and operators/procedures invoked.
        """
        if not (self.proclevel or object.literal or object.type == "proceduretype"):
            if object.type != "operatortype":
                object = self.resolve_name(object.value)
            if object.literal:
                self.push(object)
            else:
                if object.type == "proceduretype":
                    self.call_procedure(object)
                else:
                    object.function()
        else:
            self.push(object)

    def call_procedure(self, proc):
        # Execute each object of the procedure body in order.
        handle_object = self.handle_object
        for item in proc.value:
            handle_object(item)

    def resolve_name(self, name):
        # Search the dictionary stack from the top (innermost scope)
        # down to systemdict.
        dictstack = self.dictstack
        for i in range(len(dictstack) - 1, -1, -1):
            if name in dictstack[i]:
                return dictstack[i][name]
        raise PSError("name error: " + str(name))

    def do_token(
        self,
        token,
        int=int,
        float=float,
        ps_name=ps_name,
        ps_integer=ps_integer,
        ps_real=ps_real,
    ):
        """Classify a plain token as integer, real, radix number, or name.

        Tries int, then float, then PostScript radix notation
        ("base#digits", e.g. "8#1777"); anything else becomes a name.
        """
        try:
            num = int(token)
        except (ValueError, OverflowError):
            try:
                num = float(token)
            except (ValueError, OverflowError):
                if "#" in token:
                    hashpos = token.find("#")
                    try:
                        base = int(token[:hashpos])
                        num = int(token[hashpos + 1 :], base)
                    except (ValueError, OverflowError):
                        return ps_name(token)
                    else:
                        return ps_integer(num)
                else:
                    return ps_name(token)
            else:
                return ps_real(num)
        else:
            return ps_integer(num)

    def do_comment(self, token):
        # Comments are scanned but carry no semantics.
        pass

    def do_literal(self, token):
        # Strip the leading "/" of a literal name.
        return ps_literal(token[1:])

    def do_string(self, token):
        # Strip the enclosing parentheses.
        return ps_string(token[1:-1])

    def do_hexstring(self, token):
        """Decode a <...> hex-string token; embedded whitespace is ignored
        and odd-length data is padded with a trailing "0" nibble."""
        hexStr = "".join(token[1:-1].split())
        if len(hexStr) % 2:
            hexStr = hexStr + "0"
        cleanstr = []
        for i in range(0, len(hexStr), 2):
            cleanstr.append(chr(int(hexStr[i : i + 2], 16)))
        cleanstr = "".join(cleanstr)
        return ps_string(cleanstr)
if token == "{": + self.proclevel = self.proclevel + 1 + return self.procmark + elif token == "}": + proc = [] + while 1: + topobject = self.pop() + if topobject == self.procmark: + break + proc.append(topobject) + self.proclevel = self.proclevel - 1 + proc.reverse() + return ps_procedure(proc) + elif token == "[": + return self.mark + elif token == "]": + return ps_name("]") + else: + raise PSTokenError("huh?") + + def push(self, object): + self.stack.append(object) + + def pop(self, *types): + stack = self.stack + if not stack: + raise PSError("stack underflow") + object = stack[-1] + if types: + if object.type not in types: + raise PSError( + "typecheck, expected %s, found %s" % (repr(types), object.type) + ) + del stack[-1] + return object + + def do_makearray(self): + array = [] + while 1: + topobject = self.pop() + if topobject == self.mark: + break + array.append(topobject) + array.reverse() + self.push(ps_array(array)) + + def close(self): + """Remove circular references.""" + del self.stack + del self.dictstack + + +def unpack_item(item): + tp = type(item.value) + if tp == dict: + newitem = {} + for key, value in item.value.items(): + newitem[key] = unpack_item(value) + elif tp == list: + newitem = [None] * len(item.value) + for i in range(len(item.value)): + newitem[i] = unpack_item(item.value[i]) + if item.type == "proceduretype": + newitem = tuple(newitem) + else: + newitem = item.value + return newitem + + +def suckfont(data, encoding="ascii"): + m = re.search(rb"/FontName\s+/([^ \t\n\r]+)\s+def", data) + if m: + fontName = m.group(1) + fontName = fontName.decode() + else: + fontName = None + interpreter = PSInterpreter(encoding=encoding) + interpreter.interpret( + b"/Helvetica 4 dict dup /Encoding StandardEncoding put definefont pop" + ) + interpreter.interpret(data) + fontdir = interpreter.dictstack[0]["FontDirectory"].value + if fontName in fontdir: + rawfont = fontdir[fontName] + else: + # fall back, in case fontName wasn't found + fontNames = 
def round2(number, ndigits=None):
    """Implementation of Python 2 built-in round() function.

    Rounds a number to a given precision in decimal digits (default
    0 digits). The result is a floating point number. Values are rounded
    to the closest multiple of 10 to the power minus ndigits; if two
    multiples are equally close, rounding is done away from 0.
    ndigits may be negative.

    See Python 2 documentation:
    https://docs.python.org/2/library/functions.html?highlight=round#round
    """
    ndigits = 0 if ndigits is None else ndigits

    if ndigits < 0:
        # Negative precision: round to a multiple of 10**(-ndigits),
        # halves going away from zero for non-negative input.
        step = 10 ** (-ndigits)
        quotient, remainder = divmod(number, step)
        if number >= 0 and remainder >= step // 2:
            quotient += 1
        return float(quotient * step)

    # Non-negative precision: Decimal gives exact half-away-from-zero
    # rounding on the binary float's true value.
    quantum = _decimal.Decimal("10") ** (-ndigits)
    rounded = _decimal.Decimal.from_float(number).quantize(
        quantum, rounding=_decimal.ROUND_HALF_UP
    )
    return float(rounded)
def roundFunc(tolerance, round=otRound):
    """Return a rounding function suited to *tolerance*.

    Args:
        tolerance: Maximum allowed deviation from the rounded value.
            0 disables rounding entirely; >= 0.5 always rounds.
        round: Underlying rounding function (default: otRound).

    Returns:
        A callable mapping a number to its (conditionally) rounded value.

    Raises:
        ValueError: If tolerance is negative.
    """
    if tolerance < 0:
        # Message fixed: 0 is explicitly accepted below (returns noRound),
        # so the constraint is "non-negative", not "positive".
        raise ValueError("Rounding tolerance must be non-negative")

    if tolerance == 0:
        return noRound

    if tolerance >= 0.5:
        return round

    return functools.partial(maybeRound, tolerance=tolerance, round=round)
def nearestMultipleShortestRepr(value: float, factor: float) -> str:
    """Round to nearest multiple of factor and return shortest decimal representation.

    This chooses the float that is closer to a multiple of the given factor
    while having the shortest decimal representation (the fewest fractional
    decimal digits).

    >>> nearestMultipleShortestRepr(-0.61883544921875, 1.0/(1<<14))
    '-0.61884'

    Args:
        value (value): The value to be rounded and serialized.
        factor (float): The value which the result is a close multiple of.

    Returns:
        str: A compact string representation of the value.
    """
    if not value:
        return "0.0"

    # Snap to the nearest multiple of factor; the rounding helper (otRound,
    # inlined here) takes the next higher integer on exact halves.
    value = int(math.floor(value / factor + 0.5)) * factor
    half = 0.5 * factor
    lower = value - half
    upper = value + half
    # If the admissible interval straddles an integer, the integer wins.
    if int(lower) != int(upper):
        return str(float(round(value)))

    lo_str = "%.8f" % lower
    hi_str = "%.8f" % upper
    assert len(lo_str) == len(hi_str) and lo_str != hi_str
    # Find the first decimal place where the interval endpoints diverge;
    # that many digits suffice to identify a value inside the interval.
    for pos in range(len(lo_str)):
        if lo_str[pos] != hi_str[pos]:
            break
    dot = lo_str.find(".")
    assert dot < pos
    return ("%%.%df" % (pos - dot)) % value
in enumerate(BernsteinPolynomial) +) + + +def green(f, curveXY): + f = -sp.integrate(sp.sympify(f), y) + f = f.subs({x: curveXY[0], y: curveXY[1]}) + f = sp.integrate(f * sp.diff(curveXY[0], t), (t, 0, 1)) + return f + + +class _BezierFuncsLazy(dict): + def __init__(self, symfunc): + self._symfunc = symfunc + self._bezfuncs = {} + + def __missing__(self, i): + args = ["p%d" % d for d in range(i + 1)] + f = green(self._symfunc, BezierCurve[i]) + f = sp.gcd_terms(f.collect(sum(P, ()))) # Optimize + return sp.lambdify(args, f) + + +class GreenPen(BasePen): + _BezierFuncs = {} + + @classmethod + def _getGreenBezierFuncs(celf, func): + funcstr = str(func) + if not funcstr in celf._BezierFuncs: + celf._BezierFuncs[funcstr] = _BezierFuncsLazy(func) + return celf._BezierFuncs[funcstr] + + def __init__(self, func, glyphset=None): + BasePen.__init__(self, glyphset) + self._funcs = self._getGreenBezierFuncs(func) + self.value = 0 + + def _moveTo(self, p0): + self._startPoint = p0 + + def _closePath(self): + p0 = self._getCurrentPoint() + if p0 != self._startPoint: + self._lineTo(self._startPoint) + + def _endPath(self): + p0 = self._getCurrentPoint() + if p0 != self._startPoint: + # Green theorem is not defined on open contours. + raise NotImplementedError + + def _lineTo(self, p1): + p0 = self._getCurrentPoint() + self.value += self._funcs[1](p0, p1) + + def _qCurveToOne(self, p1, p2): + p0 = self._getCurrentPoint() + self.value += self._funcs[2](p0, p1, p2) + + def _curveToOne(self, p1, p2, p3): + p0 = self._getCurrentPoint() + self.value += self._funcs[3](p0, p1, p2, p3) + + +# Sample pens. +# Do not use this in real code. +# Use fontTools.pens.momentsPen.MomentsPen instead. 
def printGreenPen(penName, funcs, file=sys.stdout, docstring=None):
    """Emit the Python source of a pen class that accumulates Green-theorem
    integrals.

    Args:
        penName: name of the generated BasePen subclass.
        funcs: sequence of (attributeName, sympyIntegrand) pairs; the
            generated pen accumulates one attribute per pair.
        file: stream receiving the generated source (default: stdout).
        docstring: optional module docstring for the generated file.
    """
    if docstring is not None:
        # Bug fix: this print was missing file=file, so the module
        # docstring went to stdout instead of the output stream.
        print('"""%s"""' % docstring, file=file)

    print(
        """from fontTools.pens.basePen import BasePen, OpenContourError
try:
    import cython

    COMPILED = cython.compiled
except (AttributeError, ImportError):
    # if cython not installed, use mock module with no-op decorators and types
    from fontTools.misc import cython

    COMPILED = False


__all__ = ["%s"]

class %s(BasePen):

    def __init__(self, glyphset=None):
        BasePen.__init__(self, glyphset)
"""
        % (penName, penName),
        file=file,
    )
    for name, f in funcs:
        print("        self.%s = 0" % name, file=file)
    print(
        """
    def _moveTo(self, p0):
        self._startPoint = p0

    def _closePath(self):
        p0 = self._getCurrentPoint()
        if p0 != self._startPoint:
            self._lineTo(self._startPoint)

    def _endPath(self):
        p0 = self._getCurrentPoint()
        if p0 != self._startPoint:
            raise OpenContourError(
                "Glyph statistics is not defined on open contours."
            )
""",
        end="",
        file=file,
    )

    for n in (1, 2, 3):
        # Map the generic control-point symbols p0..pn onto x/y scalars.
        subs = {P[i][j]: [X, Y][j][i] for i in range(n + 1) for j in range(2)}
        greens = [green(f, BezierCurve[n]) for name, f in funcs]
        greens = [sp.gcd_terms(f.collect(sum(P, ()))) for f in greens]  # Optimize
        greens = [f.subs(subs) for f in greens]  # Convert to p to x/y
        defs, exprs = sp.cse(
            greens,
            optimizations="basic",
            symbols=(sp.Symbol("r%d" % i) for i in count()),
        )

        # Bug fix: this blank separator line was printed to stdout; route
        # it to `file` like every other line of the generated source.
        print(file=file)
        for name, value in defs:
            print("    @cython.locals(%s=cython.double)" % name, file=file)
        if n == 1:
            print(
                """\
    @cython.locals(x0=cython.double, y0=cython.double)
    @cython.locals(x1=cython.double, y1=cython.double)
    def _lineTo(self, p1):
        x0,y0 = self._getCurrentPoint()
        x1,y1 = p1
""",
                file=file,
            )
        elif n == 2:
            print(
                """\
    @cython.locals(x0=cython.double, y0=cython.double)
    @cython.locals(x1=cython.double, y1=cython.double)
    @cython.locals(x2=cython.double, y2=cython.double)
    def _qCurveToOne(self, p1, p2):
        x0,y0 = self._getCurrentPoint()
        x1,y1 = p1
        x2,y2 = p2
""",
                file=file,
            )
        elif n == 3:
            print(
                """\
    @cython.locals(x0=cython.double, y0=cython.double)
    @cython.locals(x1=cython.double, y1=cython.double)
    @cython.locals(x2=cython.double, y2=cython.double)
    @cython.locals(x3=cython.double, y3=cython.double)
    def _curveToOne(self, p1, p2, p3):
        x0,y0 = self._getCurrentPoint()
        x1,y1 = p1
        x2,y2 = p2
        x3,y3 = p3
""",
                file=file,
            )
        for name, value in defs:
            print("        %s = %s" % (name, value), file=file)

        print(file=file)
        for name, value in zip([f[0] for f in funcs], exprs):
            print("        self.%s += %s" % (name, value), file=file)

    print(
        """
if __name__ == '__main__':
    from fontTools.misc.symfont import x, y, printGreenPen
    printGreenPen('%s', ["""
        % penName,
        file=file,
    )
    for name, f in funcs:
        print("        ('%s', %s)," % (name, str(f)), file=file)
    print("    ])", file=file)
class Tag(str):
    """A str subclass used for four-character OpenType table tags.

    Equality, inequality and hashing transparently accept bytes operands,
    which are decoded as Latin-1, so ``Tag(b"GSUB") == "GSUB"`` holds and a
    Tag can be used interchangeably with its byte form as a dict key.
    """

    @staticmethod
    def transcode(blob):
        # Latin-1 is used so that every byte value 0-255 round-trips.
        if isinstance(blob, bytes):
            return blob.decode("latin-1")
        return blob

    def __new__(cls, content):
        return str.__new__(cls, cls.transcode(content))

    def __eq__(self, other):
        return str.__eq__(self, self.transcode(other))

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Same hash as the plain string, so Tag and str interoperate
        # as dict keys.
        return str.__hash__(self)

    def tobytes(self):
        """Return the tag re-encoded to its Latin-1 byte form."""
        return self.encode("latin-1")
def binary2num(bin):
    """Convert a binary string (optionally space-separated) to an integer."""
    value = 0
    for digit in strjoin(bin.split()):
        # Any non-"0" character counts as a set bit.
        value = (value << 1) | (digit != "0")
    return value


def caselessSort(alist):
    """Return a sorted copy of a list. If there are only strings
    in the list, it will not consider case.
    """

    def _key(item):
        # Case-insensitive primary key, original value as tie-breaker
        # for a deterministic order.
        return (item.lower(), item)

    try:
        return sorted(alist, key=_key)
    except TypeError:
        return sorted(alist)


def pad(data, size):
    r"""Pad byte string 'data' with null bytes until its length is a
    multiple of 'size'.

    >>> len(pad(b'abcd', 4))
    4
    >>> len(pad(b'abcde', 2))
    6
    >>> len(pad(b'abcde', 4))
    8
    >>> pad(b'abcdef', 4) == b'abcdef\x00\x00'
    True
    """
    data = tobytes(data)
    if size > 1:
        leftover = len(data) % size
        if leftover:
            data += b"\0" * (size - leftover)
    return data


def tostr(s, encoding="ascii", errors="strict"):
    """Decode bytes to str; str input is returned unchanged."""
    return s if isinstance(s, str) else s.decode(encoding, errors)


def tobytes(s, encoding="ascii", errors="strict"):
    """Encode str to bytes; anything else is passed through bytes()."""
    return s.encode(encoding, errors) if isinstance(s, str) else bytes(s)


def bytechr(n):
    """Return a length-1 bytes object for integer *n* (chr() for bytes)."""
    return bytes((n,))


def byteord(c):
    """Return the integer value of a byte or single character."""
    return c if isinstance(c, int) else ord(c)


def strjoin(iterable, joiner=""):
    """Join an iterable of strings with *joiner* (coerced to str)."""
    return tostr(joiner).join(iterable)


def bytesjoin(iterable, joiner=b""):
    """Join items (each coerced to bytes) with *joiner* (coerced to bytes)."""
    return tobytes(joiner).join(tobytes(item) for item in iterable)


if __name__ == "__main__":
    import doctest
    import sys

    sys.exit(doctest.testmod().failed)
# Offset (in seconds, negative) between the OpenType epoch (1904-01-01
# UTC) and the Unix epoch (1970-01-01 UTC).
epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))

DAYNAMES = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
MONTHNAMES = [
    None,
    "Jan",
    "Feb",
    "Mar",
    "Apr",
    "May",
    "Jun",
    "Jul",
    "Aug",
    "Sep",
    "Oct",
    "Nov",
    "Dec",
]


def asctime(t=None):
    """
    Convert a tuple or struct_time representing a time as returned by gmtime()
    or localtime() to a 24-character string of the following form:

    >>> asctime(time.gmtime(0))
    'Thu Jan  1 00:00:00 1970'

    If t is not provided, the current time as returned by localtime() is used.
    Locale information is not used by asctime().

    This is meant to normalise the output of the built-in time.asctime() across
    different platforms and Python versions.

    See https://github.com/fonttools/fonttools/issues/455
    """
    if t is None:
        t = time.localtime()
    # Day-of-month is right-justified in a 2-char field, matching the
    # classic asctime() layout.
    clock = time.strftime("%H:%M:%S %Y", t)
    return "%s %s %2s %s" % (DAYNAMES[t.tm_wday], MONTHNAMES[t.tm_mon], t.tm_mday, clock)


def timestampToString(value):
    """Render an OpenType timestamp as an asctime()-style string;
    values before the Unix epoch are clamped to it."""
    return asctime(time.gmtime(max(0, value + epoch_diff)))


def timestampFromString(value):
    """Parse a string produced by timestampToString() back to an
    OpenType timestamp; the weekday is checked for consistency."""
    wkday, mnth = value[:7].split()
    parsed = datetime.strptime(value[7:], " %d %H:%M:%S %Y")
    parsed = parsed.replace(month=MONTHNAMES.index(mnth), tzinfo=timezone.utc)
    assert parsed.weekday() == DAYNAMES.index(wkday), '"' + value + '" has inconsistent weekday'
    return int(parsed.timestamp()) - epoch_diff


def timestampNow():
    """Current time as an OpenType timestamp; honours SOURCE_DATE_EPOCH
    for reproducible builds."""
    # https://reproducible-builds.org/specs/source-date-epoch/
    source_date_epoch = os.environ.get("SOURCE_DATE_EPOCH")
    if source_date_epoch is not None:
        return int(source_date_epoch) - epoch_diff
    return int(time.time() - epoch_diff)


def timestampSinceEpoch(value):
    """Convert a Unix timestamp to an OpenType timestamp."""
    return int(value - epoch_diff)
+ """ + if not leaves: + return [] + + assert n > 1 + + depth = ceil(log(len(leaves), n)) + + if depth <= 1: + return list(leaves) + + # Fully populate complete subtrees of root until we have enough leaves left + root = [] + unassigned = None + full_step = n ** (depth - 1) + for i in range(0, len(leaves), full_step): + subtree = leaves[i : i + full_step] + if len(subtree) < full_step: + unassigned = subtree + break + while len(subtree) > n: + subtree = [subtree[k : k + n] for k in range(0, len(subtree), n)] + root.append(subtree) + + if unassigned: + # Recurse to fill the last subtree, which is the only partially populated one + subtree = build_n_ary_tree(unassigned, n) + if len(subtree) <= n - len(root): + # replace last subtree with its children if they can still fit + root.extend(subtree) + else: + root.append(subtree) + assert len(root) <= n + + return root diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/vector.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/vector.py new file mode 100644 index 0000000000000000000000000000000000000000..02c62e6512a04f3497f7c9987a1f414b30cf6b05 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/vector.py @@ -0,0 +1,147 @@ +from numbers import Number +import math +import operator +import warnings + + +__all__ = ["Vector"] + + +class Vector(tuple): + """A math-like vector. + + Represents an n-dimensional numeric vector. ``Vector`` objects support + vector addition and subtraction, scalar multiplication and division, + negation, rounding, and comparison tests. 
+ """ + + __slots__ = () + + def __new__(cls, values, keep=False): + if keep is not False: + warnings.warn( + "the 'keep' argument has been deprecated", + DeprecationWarning, + ) + if type(values) == Vector: + # No need to create a new object + return values + return super().__new__(cls, values) + + def __repr__(self): + return f"{self.__class__.__name__}({super().__repr__()})" + + def _vectorOp(self, other, op): + if isinstance(other, Vector): + assert len(self) == len(other) + return self.__class__(op(a, b) for a, b in zip(self, other)) + if isinstance(other, Number): + return self.__class__(op(v, other) for v in self) + raise NotImplementedError() + + def _scalarOp(self, other, op): + if isinstance(other, Number): + return self.__class__(op(v, other) for v in self) + raise NotImplementedError() + + def _unaryOp(self, op): + return self.__class__(op(v) for v in self) + + def __add__(self, other): + return self._vectorOp(other, operator.add) + + __radd__ = __add__ + + def __sub__(self, other): + return self._vectorOp(other, operator.sub) + + def __rsub__(self, other): + return self._vectorOp(other, _operator_rsub) + + def __mul__(self, other): + return self._scalarOp(other, operator.mul) + + __rmul__ = __mul__ + + def __truediv__(self, other): + return self._scalarOp(other, operator.truediv) + + def __rtruediv__(self, other): + return self._scalarOp(other, _operator_rtruediv) + + def __pos__(self): + return self._unaryOp(operator.pos) + + def __neg__(self): + return self._unaryOp(operator.neg) + + def __round__(self, *, round=round): + return self._unaryOp(round) + + def __eq__(self, other): + if isinstance(other, list): + # bw compat Vector([1, 2, 3]) == [1, 2, 3] + other = tuple(other) + return super().__eq__(other) + + def __ne__(self, other): + return not self.__eq__(other) + + def __bool__(self): + return any(self) + + __nonzero__ = __bool__ + + def __abs__(self): + return math.sqrt(sum(x * x for x in self)) + + def length(self): + """Return the length of the 
class Visitor(object):
    """Generic visitor-pattern dispatcher.

    Subclasses register ``visit`` functions for particular classes (and,
    optionally, particular attribute names) via the ``register*`` class-method
    decorators.  ``visit()`` then dispatches on the runtime type of the
    visited object, walking the MRO of both the visitor class and the visited
    object's type to find the most specific registered handler.
    """

    # When a registered visit function returns None (or nothing), this flag
    # decides whether the visitor keeps descending into the object (False,
    # the default) or stops (True).
    defaultStop = False

    @classmethod
    def _register(celf, clazzes_attrs):
        # Internal workhorse behind register()/register_attr()/register_attrs().
        # 'celf' is the (sub)class being registered on; registrations are kept
        # in a per-subclass '_visitors' dict so they do not leak to siblings.
        assert celf != Visitor, "Subclass Visitor instead."
        if "_visitors" not in celf.__dict__:
            celf._visitors = {}

        def wrapper(method):
            # The decorated function must be called 'visit' by convention.
            assert method.__name__ == "visit"
            for clazzes, attrs in clazzes_attrs:
                if type(clazzes) != tuple:
                    clazzes = (clazzes,)
                if type(attrs) == str:
                    attrs = (attrs,)
                for clazz in clazzes:
                    _visitors = celf._visitors.setdefault(clazz, {})
                    for attr in attrs:
                        assert attr not in _visitors, (
                            "Oops, class '%s' has visitor function for '%s' defined already."
                            % (clazz.__name__, attr)
                        )
                        _visitors[attr] = method
            # Returning None means the decorated name is not left bound to the
            # function at the definition site; the registry holds the only ref.
            return None

        return wrapper

    @classmethod
    def register(celf, clazzes):
        """Class-method decorator: register a visit function for whole
        objects of the given class(es).  'clazzes' may be a single class or
        a tuple of classes."""
        if type(clazzes) != tuple:
            clazzes = (clazzes,)
        # The None attr key marks a whole-object (not per-attribute) visitor.
        return celf._register([(clazzes, (None,))])

    @classmethod
    def register_attr(celf, clazzes, attrs):
        """Class-method decorator: register a visit function for the named
        attribute(s) of objects of the given class(es)."""
        clazzes_attrs = []
        if type(clazzes) != tuple:
            clazzes = (clazzes,)
        if type(attrs) == str:
            attrs = (attrs,)
        for clazz in clazzes:
            clazzes_attrs.append((clazz, attrs))
        return celf._register(clazzes_attrs)

    @classmethod
    def register_attrs(celf, clazzes_attrs):
        """Class-method decorator: register a visit function for an iterable
        of (clazzes, attrs) pairs in one go."""
        return celf._register(clazzes_attrs)

    @classmethod
    def _visitorsFor(celf, thing, _default={}):
        # Find the attr->handler dict for type(thing), searching the visitor
        # class's MRO outermost-first, and within each visitor class the
        # visited type's own MRO; first match wins.  NOTE: the shared mutable
        # _default is intentional here — it is only ever read, never mutated.
        typ = type(thing)

        for celf in celf.mro():
            _visitors = getattr(celf, "_visitors", None)
            if _visitors is None:
                break

            for base in typ.mro():
                m = celf._visitors.get(base, None)
                if m is not None:
                    return m

        return _default

    def visitObject(self, obj, *args, **kwargs):
        """Called to visit an object. This function loops over all non-private
        attributes of the objects and calls any user-registered (via
        @register_attr() or @register_attrs()) visit() functions.

        If there is no user-registered visit function, or if there is and it
        returns True, or it returns None (or doesn't return anything) and
        visitor.defaultStop is False (default), then the visitor will proceed
        to call self.visitAttr()"""

        keys = sorted(vars(obj).keys())
        _visitors = self._visitorsFor(obj)
        # "*" acts as a wildcard handler for any attribute without its own.
        defaultVisitor = _visitors.get("*", None)
        for key in keys:
            if key[0] == "_":
                continue
            value = getattr(obj, key)
            visitorFunc = _visitors.get(key, defaultVisitor)
            if visitorFunc is not None:
                ret = visitorFunc(self, obj, key, value, *args, **kwargs)
                if ret == False or (ret is None and self.defaultStop):
                    continue
            self.visitAttr(obj, key, value, *args, **kwargs)

    def visitAttr(self, obj, attr, value, *args, **kwargs):
        """Called to visit an attribute of an object."""
        self.visit(value, *args, **kwargs)

    def visitList(self, obj, *args, **kwargs):
        """Called to visit any value that is a list."""
        for value in obj:
            self.visit(value, *args, **kwargs)

    def visitDict(self, obj, *args, **kwargs):
        """Called to visit any value that is a dictionary."""
        for value in obj.values():
            self.visit(value, *args, **kwargs)

    def visitLeaf(self, obj, *args, **kwargs):
        """Called to visit any value that is not an object, list,
        or dictionary."""
        pass

    def visit(self, obj, *args, **kwargs):
        """This is the main entry to the visitor. The visitor will visit object
        obj.

        The visitor will first determine if there is a registered (via
        @register()) visit function for the type of object. If there is, it
        will be called, and (visitor, obj, *args, **kwargs) will be passed to
        the user visit function.

        If there is no user-registered visit function, or if there is and it
        returns True, or it returns None (or doesn't return anything) and
        visitor.defaultStop is False (default), then the visitor will proceed
        to dispatch to one of self.visitObject(), self.visitList(),
        self.visitDict(), or self.visitLeaf() (any of which can be overriden in
        a subclass)."""

        visitorFunc = self._visitorsFor(obj).get(None, None)
        if visitorFunc is not None:
            ret = visitorFunc(self, obj, *args, **kwargs)
            if ret == False or (ret is None and self.defaultStop):
                return
        # Enums have a __dict__ but should be treated as leaves, not objects.
        if hasattr(obj, "__dict__") and not isinstance(obj, enum.Enum):
            self.visitObject(obj, *args, **kwargs)
        elif isinstance(obj, list):
            self.visitList(obj, *args, **kwargs)
        elif isinstance(obj, dict):
            self.visitDict(obj, *args, **kwargs)
        else:
            self.visitLeaf(obj, *args, **kwargs)
file mode 100644 index 0000000000000000000000000000000000000000..b51e3d283f6fc6b23fc7ba272a96d5bd1974466d Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/mtiLib/__pycache__/__main__.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/pens/__pycache__/filterPen.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/pens/__pycache__/filterPen.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..85b3722e79a0477dd1451f90b1dc749a016c98d2 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/pens/__pycache__/filterPen.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__init__.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ce357417c7139664a194a6826220889f5ed59894 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from .qu2cu import * diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__main__.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..7c85f61b419b66d523ca0b0089a327fb4e0278aa --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__main__.py @@ -0,0 +1,7 @@ +import sys + +from .cli import _main as main + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__pycache__/__init__.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..980acc7199787369610933e6c2a4c753ed9798bb Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__pycache__/__init__.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__pycache__/__main__.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__pycache__/__main__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bbf4562b92009c77b70c5626055ccedca3a7c413 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__pycache__/__main__.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__pycache__/benchmark.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__pycache__/benchmark.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3969afb990f35cdeab1ee911f364d7122fe950c9 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__pycache__/benchmark.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/qu2cu/__pycache__/qu2cu.cpython-310.pyc 
def _font_to_cubic(input_path, output_path=None, **kwargs):
    """Convert all quadratic (TrueType) curves in the font at *input_path*
    to cubic curves and save the result to *output_path*.

    Required keyword arguments (always supplied by :func:`_main`):

    dump_stats
        bool; when True, collect and log per-conversion statistics.
    max_err_em
        float; maximum approximation error, expressed in EM units
        (scaled to font units using the font's unitsPerEm).
    all_cubic
        bool; when True, force every contour to cubic curves.
    """
    font = TTFont(input_path)
    logger.info("Converting curves for %s", input_path)

    stats = {} if kwargs["dump_stats"] else None
    qu2cu_kwargs = {
        "stats": stats,
        # The CLI tolerance is in EM units; Qu2CuPen wants font units.
        "max_err": kwargs["max_err_em"] * font["head"].unitsPerEm,
        "all_cubic": kwargs["all_cubic"],
    }

    # Variable fonts would need their gvar deltas converted as well.
    # NOTE(review): assert is stripped under `python -O`; raise a proper
    # exception if rejecting variable fonts must be guaranteed.
    assert "gvar" not in font, "Cannot convert variable font"
    glyphSet = font.getGlyphSet()
    glyphOrder = font.getGlyphOrder()
    glyf = font["glyf"]
    for glyphName in glyphOrder:
        # Redraw each glyph through Qu2CuPen, which performs the actual
        # quadratic-to-cubic conversion, into a fresh TTGlyphPen.
        glyph = glyphSet[glyphName]
        ttpen = TTGlyphPen(glyphSet)
        pen = Qu2CuPen(ttpen, **qu2cu_kwargs)
        glyph.draw(pen)
        glyf[glyphName] = ttpen.glyph(dropImpliedOnCurves=True)

    # glyphDataFormat 1 is required for cubic outlines in the glyf table.
    font["head"].glyphDataFormat = 1

    if kwargs["dump_stats"]:
        logger.info("Stats: %s", stats)

    logger.info("Saving %s", output_path)
    font.save(output_path)


def _main(args=None):
    """Convert an OpenType font from quadratic to cubic curves.

    Parses command-line arguments (or *args* when given), resolves output
    paths, and runs :func:`_font_to_cubic` over every input file.
    """
    parser = argparse.ArgumentParser(prog="qu2cu")
    parser.add_argument("--version", action="version", version=fontTools.__version__)
    parser.add_argument(
        "infiles",
        nargs="+",
        metavar="INPUT",
        help="one or more input TTF source file(s).",
    )
    parser.add_argument("-v", "--verbose", action="count", default=0)
    parser.add_argument(
        "-e",
        "--conversion-error",
        type=float,
        metavar="ERROR",
        default=0.001,
        help="maximum approximation error measured in EM (default: 0.001)",
    )
    parser.add_argument(
        "-c",
        "--all-cubic",
        default=False,
        action="store_true",
        help="whether to only use cubic curves",
    )

    # -o and -d are mutually exclusive ways of naming the output.
    output_parser = parser.add_mutually_exclusive_group()
    output_parser.add_argument(
        "-o",
        "--output-file",
        default=None,
        metavar="OUTPUT",
        help=("output filename for the converted TTF."),
    )
    output_parser.add_argument(
        "-d",
        "--output-dir",
        default=None,
        metavar="DIRECTORY",
        help="output directory where to save converted TTFs",
    )

    options = parser.parse_args(args)

    # Map -v count to logging verbosity.
    if not options.verbose:
        level = "WARNING"
    elif options.verbose == 1:
        level = "INFO"
    else:
        level = "DEBUG"
    logging.basicConfig(level=level)

    if len(options.infiles) > 1 and options.output_file:
        parser.error("-o/--output-file can't be used with multiple inputs")

    # Resolve one output path per input: explicit dir, explicit file,
    # or a derived "<name>.cubic.<ext>" next to each input.
    if options.output_dir:
        output_dir = options.output_dir
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)
        elif not os.path.isdir(output_dir):
            parser.error("'%s' is not a directory" % output_dir)
        output_paths = [
            os.path.join(output_dir, os.path.basename(p)) for p in options.infiles
        ]
    elif options.output_file:
        output_paths = [options.output_file]
    else:
        output_paths = [
            makeOutputFileName(p, overWrite=True, suffix=".cubic")
            for p in options.infiles
        ]

    kwargs = dict(
        dump_stats=options.verbose > 0,
        max_err_em=options.conversion_error,
        all_cubic=options.all_cubic,
    )

    for input_path, output_path in zip(options.infiles, output_paths):
        _font_to_cubic(input_path, output_path, **kwargs)
class UFOLibError(Exception):
    """Base class for all errors raised by fontTools.ufoLib."""

    pass


class UnsupportedUFOFormat(UFOLibError):
    """Raised when an unknown/unsupported UFO format version is requested."""

    pass


class GlifLibError(UFOLibError):
    """Base class for errors raised while reading or writing GLIF data."""

    def _add_note(self, note: str) -> None:
        # Loose backport of PEP 678 until we only support Python 3.11+, used for
        # adding additional context to errors.
        # TODO: Replace with https://docs.python.org/3.11/library/exceptions.html#BaseException.add_note
        if not self.args:
            # Robustness fix: an exception constructed with no arguments
            # would otherwise raise ValueError on unpacking below; in that
            # case the note simply becomes the message.
            self.args = (note,)
            return
        (message, *rest) = self.args
        self.args = ((message + "\n" + note), *rest)


class UnsupportedGLIFFormat(GlifLibError):
    """Raised when an unknown/unsupported GLIF format version is requested."""

    pass
+""" + +from __future__ import annotations + +import logging +import enum +from warnings import warn +from collections import OrderedDict +import fs +import fs.base +import fs.errors +import fs.osfs +import fs.path +from fontTools.misc.textTools import tobytes +from fontTools.misc import plistlib +from fontTools.pens.pointPen import AbstractPointPen, PointToSegmentPen +from fontTools.ufoLib.errors import GlifLibError +from fontTools.ufoLib.filenames import userNameToFileName +from fontTools.ufoLib.validators import ( + genericTypeValidator, + colorValidator, + guidelinesValidator, + anchorsValidator, + identifierValidator, + imageValidator, + glyphLibValidator, +) +from fontTools.misc import etree +from fontTools.ufoLib import _UFOBaseIO, UFOFormatVersion +from fontTools.ufoLib.utils import numberTypes, _VersionTupleEnumMixin + + +__all__ = [ + "GlyphSet", + "GlifLibError", + "readGlyphFromString", + "writeGlyphToString", + "glyphNameToFileName", +] + +logger = logging.getLogger(__name__) + + +# --------- +# Constants +# --------- + +CONTENTS_FILENAME = "contents.plist" +LAYERINFO_FILENAME = "layerinfo.plist" + + +class GLIFFormatVersion(tuple, _VersionTupleEnumMixin, enum.Enum): + FORMAT_1_0 = (1, 0) + FORMAT_2_0 = (2, 0) + + @classmethod + def default(cls, ufoFormatVersion=None): + if ufoFormatVersion is not None: + return max(cls.supported_versions(ufoFormatVersion)) + return super().default() + + @classmethod + def supported_versions(cls, ufoFormatVersion=None): + if ufoFormatVersion is None: + # if ufo format unspecified, return all the supported GLIF formats + return super().supported_versions() + # else only return the GLIF formats supported by the given UFO format + versions = {cls.FORMAT_1_0} + if ufoFormatVersion >= UFOFormatVersion.FORMAT_3_0: + versions.add(cls.FORMAT_2_0) + return frozenset(versions) + + +# workaround for py3.11, see https://github.com/fonttools/fonttools/pull/2655 +GLIFFormatVersion.__str__ = _VersionTupleEnumMixin.__str__ + + +# 
# ------------
# Simple Glyph
# ------------


class Glyph:
    """
    Minimal glyph object. It has no glyph attributes until either
    the draw() or the drawPoints() method has been called.
    """

    def __init__(self, glyphName, glyphSet):
        # Lazy proxy: attributes are filled in by GlyphSet.readGlyph() the
        # first time the glyph is drawn.
        self.glyphName = glyphName
        self.glyphSet = glyphSet

    def draw(self, pen, outputImpliedClosingLine=False):
        """
        Draw this glyph onto a *FontTools* Pen.
        """
        # Adapt the segment-pen protocol to the point-pen protocol.
        pointPen = PointToSegmentPen(
            pen, outputImpliedClosingLine=outputImpliedClosingLine
        )
        self.drawPoints(pointPen)

    def drawPoints(self, pointPen):
        """
        Draw this glyph onto a PointPen.
        """
        self.glyphSet.readGlyph(self.glyphName, self, pointPen)


# ---------
# Glyph Set
# ---------


class GlyphSet(_UFOBaseIO):
    """
    GlyphSet manages a set of .glif files inside one directory.

    GlyphSet's constructor takes a path to an existing directory as it's
    first argument. Reading glyph data can either be done through the
    readGlyph() method, or by using GlyphSet's dictionary interface, where
    the keys are glyph names and the values are (very) simple glyph objects.

    To write a glyph to the glyph set, you use the writeGlyph() method.
    The simple glyph objects returned through the dict interface do not
    support writing, they are just a convenient way to get at the glyph data.
    """

    # Class of the objects returned by the dict interface; subclasses may
    # override to return richer glyph objects.
    glyphClass = Glyph

    def __init__(
        self,
        path,
        glyphNameToFileNameFunc=None,
        ufoFormatVersion=None,
        validateRead=True,
        validateWrite=True,
        expectContentsFile=False,
    ):
        """
        'path' should be a path (string) to an existing local directory, or
        an instance of fs.base.FS class.

        The optional 'glyphNameToFileNameFunc' argument must be a callback
        function that takes two arguments: a glyph name and a list of all
        existing filenames (if any exist). It should return a file name
        (including the .glif extension). The glyphNameToFileName function
        is called whenever a file name is created for a given glyph name.

        ``validateRead`` will validate read operations. Its default is ``True``.
        ``validateWrite`` will validate write operations. Its default is ``True``.
        ``expectContentsFile`` will raise a GlifLibError if a contents.plist file is
        not found on the glyph set file system. This should be set to ``True`` if you
        are reading an existing UFO and ``False`` if you create a fresh glyph set.
        """
        try:
            ufoFormatVersion = UFOFormatVersion(ufoFormatVersion)
        except ValueError as e:
            from fontTools.ufoLib.errors import UnsupportedUFOFormat

            raise UnsupportedUFOFormat(
                f"Unsupported UFO format: {ufoFormatVersion!r}"
            ) from e

        if hasattr(path, "__fspath__"):  # support os.PathLike objects
            path = path.__fspath__()

        if isinstance(path, str):
            try:
                filesystem = fs.osfs.OSFS(path)
            except fs.errors.CreateFailed:
                raise GlifLibError("No glyphs directory '%s'" % path)
            # We opened the OSFS ourselves, so we are responsible for closing it.
            self._shouldClose = True
        elif isinstance(path, fs.base.FS):
            filesystem = path
            try:
                filesystem.check()
            except fs.errors.FilesystemClosed:
                raise GlifLibError("the filesystem '%s' is closed" % filesystem)
            # Caller owns the FS object; don't close it in close().
            self._shouldClose = False
        else:
            raise TypeError(
                "Expected a path string or fs object, found %s" % type(path).__name__
            )
        try:
            path = filesystem.getsyspath("/")
        except fs.errors.NoSysPath:
            # network or in-memory FS may not map to the local one
            path = str(filesystem)
        # 'dirName' is kept for backward compatibility only, but it's DEPRECATED
        # as it's not guaranteed that it maps to an existing OSFS directory.
        # Client could use the FS api via the `self.fs` attribute instead.
        self.dirName = fs.path.parts(path)[-1]
        self.fs = filesystem
        # if glyphSet contains no 'contents.plist', we consider it empty
        self._havePreviousFile = filesystem.exists(CONTENTS_FILENAME)
        if expectContentsFile and not self._havePreviousFile:
            raise GlifLibError(f"{CONTENTS_FILENAME} is missing.")
        # attribute kept for backward compatibility
        self.ufoFormatVersion = ufoFormatVersion.major
        self.ufoFormatVersionTuple = ufoFormatVersion
        if glyphNameToFileNameFunc is None:
            glyphNameToFileNameFunc = glyphNameToFileName
        self.glyphNameToFileName = glyphNameToFileNameFunc
        self._validateRead = validateRead
        self._validateWrite = validateWrite
        # Lazily-built caches; both are invalidated by rebuildContents().
        self._existingFileNames: set[str] | None = None
        self._reverseContents = None

        self.rebuildContents()

    def rebuildContents(self, validateRead=None):
        """
        Rebuild the contents dict by loading contents.plist.

        ``validateRead`` will validate the data, by default it is set to the
        class's ``validateRead`` value, can be overridden.
        """
        if validateRead is None:
            validateRead = self._validateRead
        contents = self._getPlist(CONTENTS_FILENAME, {})
        # validate the contents
        if validateRead:
            invalidFormat = False
            if not isinstance(contents, dict):
                invalidFormat = True
            else:
                for name, fileName in contents.items():
                    if not isinstance(name, str):
                        invalidFormat = True
                    if not isinstance(fileName, str):
                        invalidFormat = True
                    elif not self.fs.exists(fileName):
                        raise GlifLibError(
                            "%s references a file that does not exist: %s"
                            % (CONTENTS_FILENAME, fileName)
                        )
            if invalidFormat:
                raise GlifLibError("%s is not properly formatted" % CONTENTS_FILENAME)
        self.contents = contents
        # Drop caches derived from the (now replaced) contents mapping.
        self._existingFileNames = None
        self._reverseContents = None

    def getReverseContents(self):
        """
        Return a reversed dict of self.contents, mapping file names to
        glyph names. This is primarily an aid for custom glyph name to file
        name schemes that want to make sure they don't generate duplicate
        file names. The file names are converted to lowercase so we can
        reliably check for duplicates that only differ in case, which is
        important for case-insensitive file systems.
        """
        if self._reverseContents is None:
            d = {}
            for k, v in self.contents.items():
                d[v.lower()] = k
            self._reverseContents = d
        return self._reverseContents

    def writeContents(self):
        """
        Write the contents.plist file out to disk. Call this method when
        you're done writing glyphs.
        """
        self._writePlist(CONTENTS_FILENAME, self.contents)

    # layer info

    def readLayerInfo(self, info, validateRead=None):
        """
        ``validateRead`` will validate the data, by default it is set to the
        class's ``validateRead`` value, can be overridden.
        """
        if validateRead is None:
            validateRead = self._validateRead
        infoDict = self._getPlist(LAYERINFO_FILENAME, {})
        if validateRead:
            if not isinstance(infoDict, dict):
                raise GlifLibError("layerinfo.plist is not properly formatted.")
            infoDict = validateLayerInfoVersion3Data(infoDict)
        # populate the object
        for attr, value in infoDict.items():
            try:
                setattr(info, attr, value)
            except AttributeError:
                raise GlifLibError(
                    "The supplied layer info object does not support setting a necessary attribute (%s)."
                    % attr
                )

    def writeLayerInfo(self, info, validateWrite=None):
        """
        ``validateWrite`` will validate the data, by default it is set to the
        class's ``validateWrite`` value, can be overridden.
        """
        if validateWrite is None:
            validateWrite = self._validateWrite
        # layerinfo.plist was introduced with UFO 3.
        if self.ufoFormatVersionTuple.major < 3:
            raise GlifLibError(
                "layerinfo.plist is not allowed in UFO %d."
                % self.ufoFormatVersionTuple.major
            )
        # gather data
        infoData = {}
        for attr in layerInfoVersion3ValueData.keys():
            if hasattr(info, attr):
                try:
                    value = getattr(info, attr)
                except AttributeError:
                    raise GlifLibError(
                        "The supplied info object does not support getting a necessary attribute (%s)."
                        % attr
                    )
                if value is None or (attr == "lib" and not value):
                    continue
                infoData[attr] = value
        if infoData:
            # validate
            if validateWrite:
                infoData = validateLayerInfoVersion3Data(infoData)
            # write file
            self._writePlist(LAYERINFO_FILENAME, infoData)
        elif self._havePreviousFile and self.fs.exists(LAYERINFO_FILENAME):
            # data empty, remove existing file
            self.fs.remove(LAYERINFO_FILENAME)

    def getGLIF(self, glyphName):
        """
        Get the raw GLIF text for a given glyph name. This only works
        for GLIF files that are already on disk.

        This method is useful in situations when the raw XML needs to be
        read from a glyph set for a particular glyph before fully parsing
        it into an object structure via the readGlyph method.

        Raises KeyError if 'glyphName' is not in contents.plist, or
        GlifLibError if the file associated with can't be found.
        """
        fileName = self.contents[glyphName]
        try:
            return self.fs.readbytes(fileName)
        except fs.errors.ResourceNotFound:
            raise GlifLibError(
                "The file '%s' associated with glyph '%s' in contents.plist "
                "does not exist on %s" % (fileName, glyphName, self.fs)
            )

    def getGLIFModificationTime(self, glyphName):
        """
        Returns the modification time for the GLIF file with 'glyphName', as
        a floating point number giving the number of seconds since the epoch.
        Return None if the associated file does not exist or the underlying
        filesystem does not support getting modified times.
        Raises KeyError if the glyphName is not in contents.plist.
        """
        fileName = self.contents[glyphName]
        return self.getFileModificationTime(fileName)

    # reading/writing API

    def readGlyph(self, glyphName, glyphObject=None, pointPen=None, validate=None):
        """
        Read a .glif file for 'glyphName' from the glyph set. The
        'glyphObject' argument can be any kind of object (even None);
        the readGlyph() method will attempt to set the following
        attributes on it:

        width
                the advance width of the glyph
        height
                the advance height of the glyph
        unicodes
                a list of unicode values for this glyph
        note
                a string
        lib
                a dictionary containing custom data
        image
                a dictionary containing image data
        guidelines
                a list of guideline data dictionaries
        anchors
                a list of anchor data dictionaries

        All attributes are optional, in two ways:

        1) An attribute *won't* be set if the .glif file doesn't
           contain data for it. 'glyphObject' will have to deal
           with default values itself.
        2) If setting the attribute fails with an AttributeError
           (for example if the 'glyphObject' attribute is read-
           only), readGlyph() will not propagate that exception,
           but ignore that attribute.

        To retrieve outline information, you need to pass an object
        conforming to the PointPen protocol as the 'pointPen' argument.
        This argument may be None if you don't need the outline data.

        readGlyph() will raise KeyError if the glyph is not present in
        the glyph set.

        ``validate`` will validate the data, by default it is set to the
        class's ``validateRead`` value, can be overridden.
        """
        if validate is None:
            validate = self._validateRead
        text = self.getGLIF(glyphName)
        try:
            tree = _glifTreeFromString(text)
            formatVersions = GLIFFormatVersion.supported_versions(
                self.ufoFormatVersionTuple
            )
            _readGlyphFromTree(
                tree,
                glyphObject,
                pointPen,
                formatVersions=formatVersions,
                validate=validate,
            )
        except GlifLibError as glifLibError:
            # Re-raise with a note that gives extra context, describing where
            # the error occurred.
            fileName = self.contents[glyphName]
            try:
                glifLocation = f"'{self.fs.getsyspath(fileName)}'"
            except fs.errors.NoSysPath:
                # Network or in-memory FS may not map to a local path, so use
                # the best string representation we have.
                glifLocation = f"'{fileName}' from '{str(self.fs)}'"

            glifLibError._add_note(
                f"The issue is in glyph '{glyphName}', located in {glifLocation}."
            )
            raise

    def writeGlyph(
        self,
        glyphName,
        glyphObject=None,
        drawPointsFunc=None,
        formatVersion=None,
        validate=None,
    ):
        """
        Write a .glif file for 'glyphName' to the glyph set. The
        'glyphObject' argument can be any kind of object (even None);
        the writeGlyph() method will attempt to get the following
        attributes from it:

        width
                the advance width of the glyph
        height
                the advance height of the glyph
        unicodes
                a list of unicode values for this glyph
        note
                a string
        lib
                a dictionary containing custom data
        image
                a dictionary containing image data
        guidelines
                a list of guideline data dictionaries
        anchors
                a list of anchor data dictionaries

        All attributes are optional: if 'glyphObject' doesn't
        have the attribute, it will simply be skipped.

        To write outline data to the .glif file, writeGlyph() needs
        a function (any callable object actually) that will take one
        argument: an object that conforms to the PointPen protocol.
        The function will be called by writeGlyph(); it has to call the
        proper PointPen methods to transfer the outline to the .glif file.

        The GLIF format version will be chosen based on the ufoFormatVersion
        passed during the creation of this object. If a particular format
        version is desired, it can be passed with the formatVersion argument.
        The formatVersion argument accepts either a tuple of integers for
        (major, minor), or a single integer for the major digit only (with
        minor digit implied as 0).

        An UnsupportedGLIFFormat exception is raised if the requested GLIF
        formatVersion is not supported.

        ``validate`` will validate the data, by default it is set to the
        class's ``validateWrite`` value, can be overridden.
        """
        if formatVersion is None:
            formatVersion = GLIFFormatVersion.default(self.ufoFormatVersionTuple)
        else:
            try:
                formatVersion = GLIFFormatVersion(formatVersion)
            except ValueError as e:
                from fontTools.ufoLib.errors import UnsupportedGLIFFormat

                raise UnsupportedGLIFFormat(
                    f"Unsupported GLIF format version: {formatVersion!r}"
                ) from e
        if formatVersion not in GLIFFormatVersion.supported_versions(
            self.ufoFormatVersionTuple
        ):
            from fontTools.ufoLib.errors import UnsupportedGLIFFormat

            raise UnsupportedGLIFFormat(
                f"Unsupported GLIF format version ({formatVersion!s}) "
                f"for UFO format version {self.ufoFormatVersionTuple!s}."
            )
        if validate is None:
            validate = self._validateWrite
        fileName = self.contents.get(glyphName)
        if fileName is None:
            # New glyph: derive a unique file name (case-insensitively) and
            # keep the lazy caches in sync with the updated contents.
            if self._existingFileNames is None:
                self._existingFileNames = {
                    fileName.lower() for fileName in self.contents.values()
                }
            fileName = self.glyphNameToFileName(glyphName, self._existingFileNames)
            self.contents[glyphName] = fileName
            self._existingFileNames.add(fileName.lower())
            if self._reverseContents is not None:
                self._reverseContents[fileName.lower()] = glyphName
        data = _writeGlyphToBytes(
            glyphName,
            glyphObject,
            drawPointsFunc,
            formatVersion=formatVersion,
            validate=validate,
        )
        # Skip the write if the serialized bytes are identical to what is
        # already on disk (preserves modification times).
        if (
            self._havePreviousFile
            and self.fs.exists(fileName)
            and data == self.fs.readbytes(fileName)
        ):
            return
        self.fs.writebytes(fileName, data)

    def deleteGlyph(self, glyphName):
        """Permanently delete the glyph from the glyph set on disk. Will
        raise KeyError if the glyph is not present in the glyph set.
        """
        fileName = self.contents[glyphName]
        self.fs.remove(fileName)
        # Keep the lazy caches consistent with the removal.
        if self._existingFileNames is not None:
            self._existingFileNames.remove(fileName.lower())
        if self._reverseContents is not None:
            del self._reverseContents[fileName.lower()]
        del self.contents[glyphName]

    # dict-like support

    def keys(self):
        return list(self.contents.keys())

    def has_key(self, glyphName):
        return glyphName in self.contents

    __contains__ = has_key

    def __len__(self):
        return len(self.contents)

    def __getitem__(self, glyphName):
        if glyphName not in self.contents:
            raise KeyError(glyphName)
        return self.glyphClass(glyphName, self)

    # quickly fetch unicode values

    def getUnicodes(self, glyphNames=None):
        """
        Return a dictionary that maps glyph names to lists containing
        the unicode value[s] for that glyph, if any. This parses the .glif
        files partially, so it is a lot faster than parsing all files completely.
        By default this checks all glyphs, but a subset can be passed with glyphNames.
        """
        unicodes = {}
        if glyphNames is None:
            glyphNames = self.contents.keys()
        for glyphName in glyphNames:
            text = self.getGLIF(glyphName)
            unicodes[glyphName] = _fetchUnicodes(text)
        return unicodes

    def getComponentReferences(self, glyphNames=None):
        """
        Return a dictionary that maps glyph names to lists containing the
        base glyph name of components in the glyph. This parses the .glif
        files partially, so it is a lot faster than parsing all files completely.
        By default this checks all glyphs, but a subset can be passed with glyphNames.
        """
        components = {}
        if glyphNames is None:
            glyphNames = self.contents.keys()
        for glyphName in glyphNames:
            text = self.getGLIF(glyphName)
            components[glyphName] = _fetchComponentBases(text)
        return components

    def getImageReferences(self, glyphNames=None):
        """
        Return a dictionary that maps glyph names to the file name of the image
        referenced by the glyph. This parses the .glif files partially, so it is a
        lot faster than parsing all files completely.
        By default this checks all glyphs, but a subset can be passed with glyphNames.
        """
        images = {}
        if glyphNames is None:
            glyphNames = self.contents.keys()
        for glyphName in glyphNames:
            text = self.getGLIF(glyphName)
            images[glyphName] = _fetchImageFileName(text)
        return images

    def close(self):
        # Only close filesystems we opened ourselves (see __init__).
        if self._shouldClose:
            self.fs.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.close()
+ """ + if existingFileNames is None: + existingFileNames = set() + return userNameToFileName(glyphName, existing=existingFileNames, suffix=".glif") + + +# ----------------------- +# GLIF To and From String +# ----------------------- + + +def readGlyphFromString( + aString, + glyphObject=None, + pointPen=None, + formatVersions=None, + validate=True, +): + """ + Read .glif data from a string into a glyph object. + + The 'glyphObject' argument can be any kind of object (even None); + the readGlyphFromString() method will attempt to set the following + attributes on it: + + width + the advance width of the glyph + height + the advance height of the glyph + unicodes + a list of unicode values for this glyph + note + a string + lib + a dictionary containing custom data + image + a dictionary containing image data + guidelines + a list of guideline data dictionaries + anchors + a list of anchor data dictionaries + + All attributes are optional, in two ways: + + 1) An attribute *won't* be set if the .glif file doesn't + contain data for it. 'glyphObject' will have to deal + with default values itself. + 2) If setting the attribute fails with an AttributeError + (for example if the 'glyphObject' attribute is read- + only), readGlyphFromString() will not propagate that + exception, but ignore that attribute. + + To retrieve outline information, you need to pass an object + conforming to the PointPen protocol as the 'pointPen' argument. + This argument may be None if you don't need the outline data. + + The formatVersions optional argument define the GLIF format versions + that are allowed to be read. + The type is Optional[Iterable[Tuple[int, int], int]]. It can contain + either integers (for the major versions to be allowed, with minor + digits defaulting to 0), or tuples of integers to specify both + (major, minor) versions. + By default when formatVersions is None all the GLIF format versions + currently defined are allowed to be read. 
+ + ``validate`` will validate the read data. It is set to ``True`` by default. + """ + tree = _glifTreeFromString(aString) + + if formatVersions is None: + validFormatVersions = GLIFFormatVersion.supported_versions() + else: + validFormatVersions, invalidFormatVersions = set(), set() + for v in formatVersions: + try: + formatVersion = GLIFFormatVersion(v) + except ValueError: + invalidFormatVersions.add(v) + else: + validFormatVersions.add(formatVersion) + if not validFormatVersions: + raise ValueError( + "None of the requested GLIF formatVersions are supported: " + f"{formatVersions!r}" + ) + + _readGlyphFromTree( + tree, + glyphObject, + pointPen, + formatVersions=validFormatVersions, + validate=validate, + ) + + +def _writeGlyphToBytes( + glyphName, + glyphObject=None, + drawPointsFunc=None, + writer=None, + formatVersion=None, + validate=True, +): + """Return .glif data for a glyph as a UTF-8 encoded bytes string.""" + try: + formatVersion = GLIFFormatVersion(formatVersion) + except ValueError: + from fontTools.ufoLib.errors import UnsupportedGLIFFormat + + raise UnsupportedGLIFFormat( + "Unsupported GLIF format version: {formatVersion!r}" + ) + # start + if validate and not isinstance(glyphName, str): + raise GlifLibError("The glyph name is not properly formatted.") + if validate and len(glyphName) == 0: + raise GlifLibError("The glyph name is empty.") + glyphAttrs = OrderedDict( + [("name", glyphName), ("format", repr(formatVersion.major))] + ) + if formatVersion.minor != 0: + glyphAttrs["formatMinor"] = repr(formatVersion.minor) + root = etree.Element("glyph", glyphAttrs) + identifiers = set() + # advance + _writeAdvance(glyphObject, root, validate) + # unicodes + if getattr(glyphObject, "unicodes", None): + _writeUnicodes(glyphObject, root, validate) + # note + if getattr(glyphObject, "note", None): + _writeNote(glyphObject, root, validate) + # image + if formatVersion.major >= 2 and getattr(glyphObject, "image", None): + _writeImage(glyphObject, root, 
validate) + # guidelines + if formatVersion.major >= 2 and getattr(glyphObject, "guidelines", None): + _writeGuidelines(glyphObject, root, identifiers, validate) + # anchors + anchors = getattr(glyphObject, "anchors", None) + if formatVersion.major >= 2 and anchors: + _writeAnchors(glyphObject, root, identifiers, validate) + # outline + if drawPointsFunc is not None: + outline = etree.SubElement(root, "outline") + pen = GLIFPointPen(outline, identifiers=identifiers, validate=validate) + drawPointsFunc(pen) + if formatVersion.major == 1 and anchors: + _writeAnchorsFormat1(pen, anchors, validate) + # prevent lxml from writing self-closing tags + if not len(outline): + outline.text = "\n " + # lib + if getattr(glyphObject, "lib", None): + _writeLib(glyphObject, root, validate) + # return the text + data = etree.tostring( + root, encoding="UTF-8", xml_declaration=True, pretty_print=True + ) + return data + + +def writeGlyphToString( + glyphName, + glyphObject=None, + drawPointsFunc=None, + formatVersion=None, + validate=True, +): + """ + Return .glif data for a glyph as a string. The XML declaration's + encoding is always set to "UTF-8". + The 'glyphObject' argument can be any kind of object (even None); + the writeGlyphToString() method will attempt to get the following + attributes from it: + + width + the advance width of the glyph + height + the advance height of the glyph + unicodes + a list of unicode values for this glyph + note + a string + lib + a dictionary containing custom data + image + a dictionary containing image data + guidelines + a list of guideline data dictionaries + anchors + a list of anchor data dictionaries + + All attributes are optional: if 'glyphObject' doesn't + have the attribute, it will simply be skipped. + + To write outline data to the .glif file, writeGlyphToString() needs + a function (any callable object actually) that will take one + argument: an object that conforms to the PointPen protocol. 
+ The function will be called by writeGlyphToString(); it has to call the + proper PointPen methods to transfer the outline to the .glif file. + + The GLIF format version can be specified with the formatVersion argument. + This accepts either a tuple of integers for (major, minor), or a single + integer for the major digit only (with minor digit implied as 0). + By default when formatVesion is None the latest GLIF format version will + be used; currently it's 2.0, which is equivalent to formatVersion=(2, 0). + + An UnsupportedGLIFFormat exception is raised if the requested UFO + formatVersion is not supported. + + ``validate`` will validate the written data. It is set to ``True`` by default. + """ + data = _writeGlyphToBytes( + glyphName, + glyphObject=glyphObject, + drawPointsFunc=drawPointsFunc, + formatVersion=formatVersion, + validate=validate, + ) + return data.decode("utf-8") + + +def _writeAdvance(glyphObject, element, validate): + width = getattr(glyphObject, "width", None) + if width is not None: + if validate and not isinstance(width, numberTypes): + raise GlifLibError("width attribute must be int or float") + if width == 0: + width = None + height = getattr(glyphObject, "height", None) + if height is not None: + if validate and not isinstance(height, numberTypes): + raise GlifLibError("height attribute must be int or float") + if height == 0: + height = None + if width is not None and height is not None: + etree.SubElement( + element, + "advance", + OrderedDict([("height", repr(height)), ("width", repr(width))]), + ) + elif width is not None: + etree.SubElement(element, "advance", dict(width=repr(width))) + elif height is not None: + etree.SubElement(element, "advance", dict(height=repr(height))) + + +def _writeUnicodes(glyphObject, element, validate): + unicodes = getattr(glyphObject, "unicodes", None) + if validate and isinstance(unicodes, int): + unicodes = [unicodes] + seen = set() + for code in unicodes: + if validate and not isinstance(code, int): 
+ raise GlifLibError("unicode values must be int") + if code in seen: + continue + seen.add(code) + hexCode = "%04X" % code + etree.SubElement(element, "unicode", dict(hex=hexCode)) + + +def _writeNote(glyphObject, element, validate): + note = getattr(glyphObject, "note", None) + if validate and not isinstance(note, str): + raise GlifLibError("note attribute must be str") + note = note.strip() + note = "\n" + note + "\n" + etree.SubElement(element, "note").text = note + + +def _writeImage(glyphObject, element, validate): + image = getattr(glyphObject, "image", None) + if validate and not imageValidator(image): + raise GlifLibError( + "image attribute must be a dict or dict-like object with the proper structure." + ) + attrs = OrderedDict([("fileName", image["fileName"])]) + for attr, default in _transformationInfo: + value = image.get(attr, default) + if value != default: + attrs[attr] = repr(value) + color = image.get("color") + if color is not None: + attrs["color"] = color + etree.SubElement(element, "image", attrs) + + +def _writeGuidelines(glyphObject, element, identifiers, validate): + guidelines = getattr(glyphObject, "guidelines", []) + if validate and not guidelinesValidator(guidelines): + raise GlifLibError("guidelines attribute does not have the proper structure.") + for guideline in guidelines: + attrs = OrderedDict() + x = guideline.get("x") + if x is not None: + attrs["x"] = repr(x) + y = guideline.get("y") + if y is not None: + attrs["y"] = repr(y) + angle = guideline.get("angle") + if angle is not None: + attrs["angle"] = repr(angle) + name = guideline.get("name") + if name is not None: + attrs["name"] = name + color = guideline.get("color") + if color is not None: + attrs["color"] = color + identifier = guideline.get("identifier") + if identifier is not None: + if validate and identifier in identifiers: + raise GlifLibError("identifier used more than once: %s" % identifier) + attrs["identifier"] = identifier + identifiers.add(identifier) + 
etree.SubElement(element, "guideline", attrs) + + +def _writeAnchorsFormat1(pen, anchors, validate): + if validate and not anchorsValidator(anchors): + raise GlifLibError("anchors attribute does not have the proper structure.") + for anchor in anchors: + attrs = {} + x = anchor["x"] + attrs["x"] = repr(x) + y = anchor["y"] + attrs["y"] = repr(y) + name = anchor.get("name") + if name is not None: + attrs["name"] = name + pen.beginPath() + pen.addPoint((x, y), segmentType="move", name=name) + pen.endPath() + + +def _writeAnchors(glyphObject, element, identifiers, validate): + anchors = getattr(glyphObject, "anchors", []) + if validate and not anchorsValidator(anchors): + raise GlifLibError("anchors attribute does not have the proper structure.") + for anchor in anchors: + attrs = OrderedDict() + x = anchor["x"] + attrs["x"] = repr(x) + y = anchor["y"] + attrs["y"] = repr(y) + name = anchor.get("name") + if name is not None: + attrs["name"] = name + color = anchor.get("color") + if color is not None: + attrs["color"] = color + identifier = anchor.get("identifier") + if identifier is not None: + if validate and identifier in identifiers: + raise GlifLibError("identifier used more than once: %s" % identifier) + attrs["identifier"] = identifier + identifiers.add(identifier) + etree.SubElement(element, "anchor", attrs) + + +def _writeLib(glyphObject, element, validate): + lib = getattr(glyphObject, "lib", None) + if not lib: + # don't write empty lib + return + if validate: + valid, message = glyphLibValidator(lib) + if not valid: + raise GlifLibError(message) + if not isinstance(lib, dict): + lib = dict(lib) + # plist inside GLIF begins with 2 levels of indentation + e = plistlib.totree(lib, indent_level=2) + etree.SubElement(element, "lib").append(e) + + +# ----------------------- +# layerinfo.plist Support +# ----------------------- + +layerInfoVersion3ValueData = { + "color": dict(type=str, valueValidator=colorValidator), + "lib": dict(type=dict, 
        valueValidator=genericTypeValidator),
}


def validateLayerInfoVersion3ValueForAttribute(attr, value):
    """
    This performs very basic validation of the value for attribute
    following the UFO 3 fontinfo.plist specification. The results
    of this should not be interpretted as *correct* for the font
    that they are part of. This merely indicates that the value
    is of the proper type and, where the specification defines
    a set range of possible values for an attribute, that the
    value is in the accepted range.

    Returns False for unknown attributes; otherwise returns the
    boolean result of the attribute's registered validator.
    """
    # Unknown attributes are simply invalid rather than an error here;
    # validateLayerInfoVersion3Data (below) is what raises for them.
    if attr not in layerInfoVersion3ValueData:
        return False
    dataValidationDict = layerInfoVersion3ValueData[attr]
    valueType = dataValidationDict.get("type")
    validator = dataValidationDict.get("valueValidator")
    valueOptions = dataValidationDict.get("valueOptions")
    # have specific options for the validator
    if valueOptions is not None:
        isValidValue = validator(value, valueOptions)
    # no specific options
    else:
        # genericTypeValidator is the only validator that needs the
        # expected type as a second argument; all others take the value alone.
        if validator == genericTypeValidator:
            isValidValue = validator(value, valueType)
        else:
            isValidValue = validator(value)
    return isValidValue


def validateLayerInfoVersion3Data(infoData):
    """
    This performs very basic validation of the value for infoData
    following the UFO 3 layerinfo.plist specification. The results
    of this should not be interpretted as *correct* for the font
    that they are part of. This merely indicates that the values
    are of the proper type and, where the specification defines
    a set range of possible values for an attribute, that the
    value is in the accepted range.
    """
    for attr, value in infoData.items():
        if attr not in layerInfoVersion3ValueData:
            raise GlifLibError("Unknown attribute %s."
            % attr)
        isValidValue = validateLayerInfoVersion3ValueForAttribute(attr, value)
        if not isValidValue:
            raise GlifLibError(f"Invalid value for attribute {attr} ({value!r}).")
    return infoData


# -----------------
# GLIF Tree Support
# -----------------


def _glifTreeFromFile(aFile):
    """Parse a GLIF file object and return the validated <glyph> root element."""
    # With lxml available, strip XML comments during parsing so downstream
    # tree-walking code never sees them; the stdlib ElementTree fallback
    # has no remove_comments option.
    # NOTE(review): unlike _glifTreeFromString below, raw parser exceptions
    # propagate here instead of being wrapped in GlifLibError — confirm
    # whether that asymmetry is intentional.
    if etree._have_lxml:
        tree = etree.parse(aFile, parser=etree.XMLParser(remove_comments=True))
    else:
        tree = etree.parse(aFile)
    root = tree.getroot()
    if root.tag != "glyph":
        raise GlifLibError("The GLIF is not properly formatted.")
    # The <glyph> element must not contain stray text content.
    if root.text and root.text.strip() != "":
        raise GlifLibError("Invalid GLIF structure.")
    return root


def _glifTreeFromString(aString):
    """Parse GLIF data from a string/bytes and return the validated <glyph> root."""
    data = tobytes(aString, encoding="utf-8")
    try:
        if etree._have_lxml:
            root = etree.fromstring(data, parser=etree.XMLParser(remove_comments=True))
        else:
            root = etree.fromstring(data)
    except Exception as etree_exception:
        # Normalize any parser failure into the library's own error type,
        # keeping the original exception chained for debugging.
        raise GlifLibError("GLIF contains invalid XML.") from etree_exception

    if root.tag != "glyph":
        raise GlifLibError("The GLIF is not properly formatted.")
    if root.text and root.text.strip() != "":
        raise GlifLibError("Invalid GLIF structure.")
    return root


def _readGlyphFromTree(
    tree,
    glyphObject=None,
    pointPen=None,
    formatVersions=GLIFFormatVersion.supported_versions(),
    validate=True,
):
    # check the format version
    formatVersionMajor = tree.get("format")
    if validate and formatVersionMajor is None:
        raise GlifLibError("Unspecified format version in GLIF.")
    # formatMinor is optional and defaults to 0 per the GLIF spec.
    formatVersionMinor = tree.get("formatMinor", 0)
    try:
        formatVersion = GLIFFormatVersion(
            (int(formatVersionMajor), int(formatVersionMinor))
        )
    except ValueError as e:
        msg = "Unsupported GLIF format: %s.%s" % (
            formatVersionMajor,
            formatVersionMinor,
        )
        if validate:
            from fontTools.ufoLib.errors import UnsupportedGLIFFormat

            raise UnsupportedGLIFFormat(msg) from e
        # warn but continue using the latest supported format
        formatVersion =
GLIFFormatVersion.default() + logger.warning( + "%s. Assuming the latest supported version (%s). " + "Some data may be skipped or parsed incorrectly.", + msg, + formatVersion, + ) + + if validate and formatVersion not in formatVersions: + raise GlifLibError(f"Forbidden GLIF format version: {formatVersion!s}") + + try: + readGlyphFromTree = _READ_GLYPH_FROM_TREE_FUNCS[formatVersion] + except KeyError: + raise NotImplementedError(formatVersion) + + readGlyphFromTree( + tree=tree, + glyphObject=glyphObject, + pointPen=pointPen, + validate=validate, + formatMinor=formatVersion.minor, + ) + + +def _readGlyphFromTreeFormat1( + tree, glyphObject=None, pointPen=None, validate=None, **kwargs +): + # get the name + _readName(glyphObject, tree, validate) + # populate the sub elements + unicodes = [] + haveSeenAdvance = haveSeenOutline = haveSeenLib = haveSeenNote = False + for element in tree: + if element.tag == "outline": + if validate: + if haveSeenOutline: + raise GlifLibError("The outline element occurs more than once.") + if element.attrib: + raise GlifLibError( + "The outline element contains unknown attributes." + ) + if element.text and element.text.strip() != "": + raise GlifLibError("Invalid outline structure.") + haveSeenOutline = True + buildOutlineFormat1(glyphObject, pointPen, element, validate) + elif glyphObject is None: + continue + elif element.tag == "advance": + if validate and haveSeenAdvance: + raise GlifLibError("The advance element occurs more than once.") + haveSeenAdvance = True + _readAdvance(glyphObject, element) + elif element.tag == "unicode": + try: + v = element.get("hex") + v = int(v, 16) + if v not in unicodes: + unicodes.append(v) + except ValueError: + raise GlifLibError( + "Illegal value for hex attribute of unicode element." 
+ ) + elif element.tag == "note": + if validate and haveSeenNote: + raise GlifLibError("The note element occurs more than once.") + haveSeenNote = True + _readNote(glyphObject, element) + elif element.tag == "lib": + if validate and haveSeenLib: + raise GlifLibError("The lib element occurs more than once.") + haveSeenLib = True + _readLib(glyphObject, element, validate) + else: + raise GlifLibError("Unknown element in GLIF: %s" % element) + # set the collected unicodes + if unicodes: + _relaxedSetattr(glyphObject, "unicodes", unicodes) + + +def _readGlyphFromTreeFormat2( + tree, glyphObject=None, pointPen=None, validate=None, formatMinor=0 +): + # get the name + _readName(glyphObject, tree, validate) + # populate the sub elements + unicodes = [] + guidelines = [] + anchors = [] + haveSeenAdvance = haveSeenImage = haveSeenOutline = haveSeenLib = haveSeenNote = ( + False + ) + identifiers = set() + for element in tree: + if element.tag == "outline": + if validate: + if haveSeenOutline: + raise GlifLibError("The outline element occurs more than once.") + if element.attrib: + raise GlifLibError( + "The outline element contains unknown attributes." + ) + if element.text and element.text.strip() != "": + raise GlifLibError("Invalid outline structure.") + haveSeenOutline = True + if pointPen is not None: + buildOutlineFormat2( + glyphObject, pointPen, element, identifiers, validate + ) + elif glyphObject is None: + continue + elif element.tag == "advance": + if validate and haveSeenAdvance: + raise GlifLibError("The advance element occurs more than once.") + haveSeenAdvance = True + _readAdvance(glyphObject, element) + elif element.tag == "unicode": + try: + v = element.get("hex") + v = int(v, 16) + if v not in unicodes: + unicodes.append(v) + except ValueError: + raise GlifLibError( + "Illegal value for hex attribute of unicode element." 
+ ) + elif element.tag == "guideline": + if validate and len(element): + raise GlifLibError("Unknown children in guideline element.") + attrib = dict(element.attrib) + for attr in ("x", "y", "angle"): + if attr in attrib: + attrib[attr] = _number(attrib[attr]) + guidelines.append(attrib) + elif element.tag == "anchor": + if validate and len(element): + raise GlifLibError("Unknown children in anchor element.") + attrib = dict(element.attrib) + for attr in ("x", "y"): + if attr in element.attrib: + attrib[attr] = _number(attrib[attr]) + anchors.append(attrib) + elif element.tag == "image": + if validate: + if haveSeenImage: + raise GlifLibError("The image element occurs more than once.") + if len(element): + raise GlifLibError("Unknown children in image element.") + haveSeenImage = True + _readImage(glyphObject, element, validate) + elif element.tag == "note": + if validate and haveSeenNote: + raise GlifLibError("The note element occurs more than once.") + haveSeenNote = True + _readNote(glyphObject, element) + elif element.tag == "lib": + if validate and haveSeenLib: + raise GlifLibError("The lib element occurs more than once.") + haveSeenLib = True + _readLib(glyphObject, element, validate) + else: + raise GlifLibError("Unknown element in GLIF: %s" % element) + # set the collected unicodes + if unicodes: + _relaxedSetattr(glyphObject, "unicodes", unicodes) + # set the collected guidelines + if guidelines: + if validate and not guidelinesValidator(guidelines, identifiers): + raise GlifLibError("The guidelines are improperly formatted.") + _relaxedSetattr(glyphObject, "guidelines", guidelines) + # set the collected anchors + if anchors: + if validate and not anchorsValidator(anchors, identifiers): + raise GlifLibError("The anchors are improperly formatted.") + _relaxedSetattr(glyphObject, "anchors", anchors) + + +_READ_GLYPH_FROM_TREE_FUNCS = { + GLIFFormatVersion.FORMAT_1_0: _readGlyphFromTreeFormat1, + GLIFFormatVersion.FORMAT_2_0: _readGlyphFromTreeFormat2, +} + + 
+def _readName(glyphObject, root, validate): + glyphName = root.get("name") + if validate and not glyphName: + raise GlifLibError("Empty glyph name in GLIF.") + if glyphName and glyphObject is not None: + _relaxedSetattr(glyphObject, "name", glyphName) + + +def _readAdvance(glyphObject, advance): + width = _number(advance.get("width", 0)) + _relaxedSetattr(glyphObject, "width", width) + height = _number(advance.get("height", 0)) + _relaxedSetattr(glyphObject, "height", height) + + +def _readNote(glyphObject, note): + lines = note.text.split("\n") + note = "\n".join(line.strip() for line in lines if line.strip()) + _relaxedSetattr(glyphObject, "note", note) + + +def _readLib(glyphObject, lib, validate): + assert len(lib) == 1 + child = lib[0] + plist = plistlib.fromtree(child) + if validate: + valid, message = glyphLibValidator(plist) + if not valid: + raise GlifLibError(message) + _relaxedSetattr(glyphObject, "lib", plist) + + +def _readImage(glyphObject, image, validate): + imageData = dict(image.attrib) + for attr, default in _transformationInfo: + value = imageData.get(attr, default) + imageData[attr] = _number(value) + if validate and not imageValidator(imageData): + raise GlifLibError("The image element is not properly formatted.") + _relaxedSetattr(glyphObject, "image", imageData) + + +# ---------------- +# GLIF to PointPen +# ---------------- + +contourAttributesFormat2 = {"identifier"} +componentAttributesFormat1 = { + "base", + "xScale", + "xyScale", + "yxScale", + "yScale", + "xOffset", + "yOffset", +} +componentAttributesFormat2 = componentAttributesFormat1 | {"identifier"} +pointAttributesFormat1 = {"x", "y", "type", "smooth", "name"} +pointAttributesFormat2 = pointAttributesFormat1 | {"identifier"} +pointSmoothOptions = {"no", "yes"} +pointTypeOptions = {"move", "line", "offcurve", "curve", "qcurve"} + +# format 1 + + +def buildOutlineFormat1(glyphObject, pen, outline, validate): + anchors = [] + for element in outline: + if element.tag == "contour": + 
if len(element) == 1: + point = element[0] + if point.tag == "point": + anchor = _buildAnchorFormat1(point, validate) + if anchor is not None: + anchors.append(anchor) + continue + if pen is not None: + _buildOutlineContourFormat1(pen, element, validate) + elif element.tag == "component": + if pen is not None: + _buildOutlineComponentFormat1(pen, element, validate) + else: + raise GlifLibError("Unknown element in outline element: %s" % element) + if glyphObject is not None and anchors: + if validate and not anchorsValidator(anchors): + raise GlifLibError("GLIF 1 anchors are not properly formatted.") + _relaxedSetattr(glyphObject, "anchors", anchors) + + +def _buildAnchorFormat1(point, validate): + if point.get("type") != "move": + return None + name = point.get("name") + if name is None: + return None + x = point.get("x") + y = point.get("y") + if validate and x is None: + raise GlifLibError("Required x attribute is missing in point element.") + if validate and y is None: + raise GlifLibError("Required y attribute is missing in point element.") + x = _number(x) + y = _number(y) + anchor = dict(x=x, y=y, name=name) + return anchor + + +def _buildOutlineContourFormat1(pen, contour, validate): + if validate and contour.attrib: + raise GlifLibError("Unknown attributes in contour element.") + pen.beginPath() + if len(contour): + massaged = _validateAndMassagePointStructures( + contour, + pointAttributesFormat1, + openContourOffCurveLeniency=True, + validate=validate, + ) + _buildOutlinePointsFormat1(pen, massaged) + pen.endPath() + + +def _buildOutlinePointsFormat1(pen, contour): + for point in contour: + x = point["x"] + y = point["y"] + segmentType = point["segmentType"] + smooth = point["smooth"] + name = point["name"] + pen.addPoint((x, y), segmentType=segmentType, smooth=smooth, name=name) + + +def _buildOutlineComponentFormat1(pen, component, validate): + if validate: + if len(component): + raise GlifLibError("Unknown child elements of component element.") + for 
attr in component.attrib.keys(): + if attr not in componentAttributesFormat1: + raise GlifLibError("Unknown attribute in component element: %s" % attr) + baseGlyphName = component.get("base") + if validate and baseGlyphName is None: + raise GlifLibError("The base attribute is not defined in the component.") + transformation = [] + for attr, default in _transformationInfo: + value = component.get(attr) + if value is None: + value = default + else: + value = _number(value) + transformation.append(value) + pen.addComponent(baseGlyphName, tuple(transformation)) + + +# format 2 + + +def buildOutlineFormat2(glyphObject, pen, outline, identifiers, validate): + for element in outline: + if element.tag == "contour": + _buildOutlineContourFormat2(pen, element, identifiers, validate) + elif element.tag == "component": + _buildOutlineComponentFormat2(pen, element, identifiers, validate) + else: + raise GlifLibError("Unknown element in outline element: %s" % element.tag) + + +def _buildOutlineContourFormat2(pen, contour, identifiers, validate): + if validate: + for attr in contour.attrib.keys(): + if attr not in contourAttributesFormat2: + raise GlifLibError("Unknown attribute in contour element: %s" % attr) + identifier = contour.get("identifier") + if identifier is not None: + if validate: + if identifier in identifiers: + raise GlifLibError( + "The identifier %s is used more than once." % identifier + ) + if not identifierValidator(identifier): + raise GlifLibError( + "The contour identifier %s is not valid." % identifier + ) + identifiers.add(identifier) + try: + pen.beginPath(identifier=identifier) + except TypeError: + pen.beginPath() + warn( + "The beginPath method needs an identifier kwarg. 
The contour's identifier value has been discarded.", + DeprecationWarning, + ) + if len(contour): + massaged = _validateAndMassagePointStructures( + contour, pointAttributesFormat2, validate=validate + ) + _buildOutlinePointsFormat2(pen, massaged, identifiers, validate) + pen.endPath() + + +def _buildOutlinePointsFormat2(pen, contour, identifiers, validate): + for point in contour: + x = point["x"] + y = point["y"] + segmentType = point["segmentType"] + smooth = point["smooth"] + name = point["name"] + identifier = point.get("identifier") + if identifier is not None: + if validate: + if identifier in identifiers: + raise GlifLibError( + "The identifier %s is used more than once." % identifier + ) + if not identifierValidator(identifier): + raise GlifLibError("The identifier %s is not valid." % identifier) + identifiers.add(identifier) + try: + pen.addPoint( + (x, y), + segmentType=segmentType, + smooth=smooth, + name=name, + identifier=identifier, + ) + except TypeError: + pen.addPoint((x, y), segmentType=segmentType, smooth=smooth, name=name) + warn( + "The addPoint method needs an identifier kwarg. 
The point's identifier value has been discarded.", + DeprecationWarning, + ) + + +def _buildOutlineComponentFormat2(pen, component, identifiers, validate): + if validate: + if len(component): + raise GlifLibError("Unknown child elements of component element.") + for attr in component.attrib.keys(): + if attr not in componentAttributesFormat2: + raise GlifLibError("Unknown attribute in component element: %s" % attr) + baseGlyphName = component.get("base") + if validate and baseGlyphName is None: + raise GlifLibError("The base attribute is not defined in the component.") + transformation = [] + for attr, default in _transformationInfo: + value = component.get(attr) + if value is None: + value = default + else: + value = _number(value) + transformation.append(value) + identifier = component.get("identifier") + if identifier is not None: + if validate: + if identifier in identifiers: + raise GlifLibError( + "The identifier %s is used more than once." % identifier + ) + if validate and not identifierValidator(identifier): + raise GlifLibError("The identifier %s is not valid." % identifier) + identifiers.add(identifier) + try: + pen.addComponent(baseGlyphName, tuple(transformation), identifier=identifier) + except TypeError: + pen.addComponent(baseGlyphName, tuple(transformation)) + warn( + "The addComponent method needs an identifier kwarg. The component's identifier value has been discarded.", + DeprecationWarning, + ) + + +# all formats + + +def _validateAndMassagePointStructures( + contour, pointAttributes, openContourOffCurveLeniency=False, validate=True +): + if not len(contour): + return + # store some data for later validation + lastOnCurvePoint = None + haveOffCurvePoint = False + # validate and massage the individual point elements + massaged = [] + for index, element in enumerate(contour): + # not + if element.tag != "point": + raise GlifLibError( + "Unknown child element (%s) of contour element." 
% element.tag + ) + point = dict(element.attrib) + massaged.append(point) + if validate: + # unknown attributes + for attr in point.keys(): + if attr not in pointAttributes: + raise GlifLibError("Unknown attribute in point element: %s" % attr) + # search for unknown children + if len(element): + raise GlifLibError("Unknown child elements in point element.") + # x and y are required + for attr in ("x", "y"): + try: + point[attr] = _number(point[attr]) + except KeyError as e: + raise GlifLibError( + f"Required {attr} attribute is missing in point element." + ) from e + # segment type + pointType = point.pop("type", "offcurve") + if validate and pointType not in pointTypeOptions: + raise GlifLibError("Unknown point type: %s" % pointType) + if pointType == "offcurve": + pointType = None + point["segmentType"] = pointType + if pointType is None: + haveOffCurvePoint = True + else: + lastOnCurvePoint = index + # move can only occur as the first point + if validate and pointType == "move" and index != 0: + raise GlifLibError( + "A move point occurs after the first point in the contour." + ) + # smooth is optional + smooth = point.get("smooth", "no") + if validate and smooth is not None: + if smooth not in pointSmoothOptions: + raise GlifLibError("Unknown point smooth value: %s" % smooth) + smooth = smooth == "yes" + point["smooth"] = smooth + # smooth can only be applied to curve and qcurve + if validate and smooth and pointType is None: + raise GlifLibError("smooth attribute set in an offcurve point.") + # name is optional + if "name" not in element.attrib: + point["name"] = None + if openContourOffCurveLeniency: + # remove offcurves that precede a move. this is technically illegal, + # but we let it slide because there are fonts out there in the wild like this. 
+ if massaged[0]["segmentType"] == "move": + count = 0 + for point in reversed(massaged): + if point["segmentType"] is None: + count += 1 + else: + break + if count: + massaged = massaged[:-count] + # validate the off-curves in the segments + if validate and haveOffCurvePoint and lastOnCurvePoint is not None: + # we only care about how many offCurves there are before an onCurve + # filter out the trailing offCurves + offCurvesCount = len(massaged) - 1 - lastOnCurvePoint + for point in massaged: + segmentType = point["segmentType"] + if segmentType is None: + offCurvesCount += 1 + else: + if offCurvesCount: + # move and line can't be preceded by off-curves + if segmentType == "move": + # this will have been filtered out already + raise GlifLibError("move can not have an offcurve.") + elif segmentType == "line": + raise GlifLibError("line can not have an offcurve.") + elif segmentType == "curve": + if offCurvesCount > 2: + raise GlifLibError("Too many offcurves defined for curve.") + elif segmentType == "qcurve": + pass + else: + # unknown segment type. it'll be caught later. + pass + offCurvesCount = 0 + return massaged + + +# --------------------- +# Misc Helper Functions +# --------------------- + + +def _relaxedSetattr(object, attr, value): + try: + setattr(object, attr, value) + except AttributeError: + pass + + +def _number(s): + """ + Given a numeric string, return an integer or a float, whichever + the string indicates. _number("1") will return the integer 1, + _number("1.0") will return the float 1.0. + + >>> _number("1") + 1 + >>> _number("1.0") + 1.0 + >>> _number("a") # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + GlifLibError: Could not convert a to an int or float. + """ + try: + n = int(s) + return n + except ValueError: + pass + try: + n = float(s) + return n + except ValueError: + raise GlifLibError("Could not convert %s to an int or float." 
% s) + + +# -------------------- +# Rapid Value Fetching +# -------------------- + +# base + + +class _DoneParsing(Exception): + pass + + +class _BaseParser: + def __init__(self): + self._elementStack = [] + + def parse(self, text): + from xml.parsers.expat import ParserCreate + + parser = ParserCreate() + parser.StartElementHandler = self.startElementHandler + parser.EndElementHandler = self.endElementHandler + parser.Parse(text) + + def startElementHandler(self, name, attrs): + self._elementStack.append(name) + + def endElementHandler(self, name): + other = self._elementStack.pop(-1) + assert other == name + + +# unicodes + + +def _fetchUnicodes(glif): + """ + Get a list of unicodes listed in glif. + """ + parser = _FetchUnicodesParser() + parser.parse(glif) + return parser.unicodes + + +class _FetchUnicodesParser(_BaseParser): + def __init__(self): + self.unicodes = [] + super().__init__() + + def startElementHandler(self, name, attrs): + if ( + name == "unicode" + and self._elementStack + and self._elementStack[-1] == "glyph" + ): + value = attrs.get("hex") + if value is not None: + try: + value = int(value, 16) + if value not in self.unicodes: + self.unicodes.append(value) + except ValueError: + pass + super().startElementHandler(name, attrs) + + +# image + + +def _fetchImageFileName(glif): + """ + The image file name (if any) from glif. + """ + parser = _FetchImageFileNameParser() + try: + parser.parse(glif) + except _DoneParsing: + pass + return parser.fileName + + +class _FetchImageFileNameParser(_BaseParser): + def __init__(self): + self.fileName = None + super().__init__() + + def startElementHandler(self, name, attrs): + if name == "image" and self._elementStack and self._elementStack[-1] == "glyph": + self.fileName = attrs.get("fileName") + raise _DoneParsing + super().startElementHandler(name, attrs) + + +# component references + + +def _fetchComponentBases(glif): + """ + Get a list of component base glyphs listed in glif. 
+ """ + parser = _FetchComponentBasesParser() + try: + parser.parse(glif) + except _DoneParsing: + pass + return list(parser.bases) + + +class _FetchComponentBasesParser(_BaseParser): + def __init__(self): + self.bases = [] + super().__init__() + + def startElementHandler(self, name, attrs): + if ( + name == "component" + and self._elementStack + and self._elementStack[-1] == "outline" + ): + base = attrs.get("base") + if base is not None: + self.bases.append(base) + super().startElementHandler(name, attrs) + + def endElementHandler(self, name): + if name == "outline": + raise _DoneParsing + super().endElementHandler(name) + + +# -------------- +# GLIF Point Pen +# -------------- + +_transformationInfo = [ + # field name, default value + ("xScale", 1), + ("xyScale", 0), + ("yxScale", 0), + ("yScale", 1), + ("xOffset", 0), + ("yOffset", 0), +] + + +class GLIFPointPen(AbstractPointPen): + """ + Helper class using the PointPen protocol to write the + part of .glif files. + """ + + def __init__(self, element, formatVersion=None, identifiers=None, validate=True): + if identifiers is None: + identifiers = set() + self.formatVersion = GLIFFormatVersion(formatVersion) + self.identifiers = identifiers + self.outline = element + self.contour = None + self.prevOffCurveCount = 0 + self.prevPointTypes = [] + self.validate = validate + + def beginPath(self, identifier=None, **kwargs): + attrs = OrderedDict() + if identifier is not None and self.formatVersion.major >= 2: + if self.validate: + if identifier in self.identifiers: + raise GlifLibError( + "identifier used more than once: %s" % identifier + ) + if not identifierValidator(identifier): + raise GlifLibError( + "identifier not formatted properly: %s" % identifier + ) + attrs["identifier"] = identifier + self.identifiers.add(identifier) + self.contour = etree.SubElement(self.outline, "contour", attrs) + self.prevOffCurveCount = 0 + + def endPath(self): + if self.prevPointTypes and self.prevPointTypes[0] == "move": + if 
self.validate and self.prevPointTypes[-1] == "offcurve": + raise GlifLibError("open contour has loose offcurve point") + # prevent lxml from writing self-closing tags + if not len(self.contour): + self.contour.text = "\n " + self.contour = None + self.prevPointType = None + self.prevOffCurveCount = 0 + self.prevPointTypes = [] + + def addPoint( + self, pt, segmentType=None, smooth=None, name=None, identifier=None, **kwargs + ): + attrs = OrderedDict() + # coordinates + if pt is not None: + if self.validate: + for coord in pt: + if not isinstance(coord, numberTypes): + raise GlifLibError("coordinates must be int or float") + attrs["x"] = repr(pt[0]) + attrs["y"] = repr(pt[1]) + # segment type + if segmentType == "offcurve": + segmentType = None + if self.validate: + if segmentType == "move" and self.prevPointTypes: + raise GlifLibError( + "move occurs after a point has already been added to the contour." + ) + if ( + segmentType in ("move", "line") + and self.prevPointTypes + and self.prevPointTypes[-1] == "offcurve" + ): + raise GlifLibError("offcurve occurs before %s point." 
% segmentType) + if segmentType == "curve" and self.prevOffCurveCount > 2: + raise GlifLibError("too many offcurve points before curve point.") + if segmentType is not None: + attrs["type"] = segmentType + else: + segmentType = "offcurve" + if segmentType == "offcurve": + self.prevOffCurveCount += 1 + else: + self.prevOffCurveCount = 0 + self.prevPointTypes.append(segmentType) + # smooth + if smooth: + if self.validate and segmentType == "offcurve": + raise GlifLibError("can't set smooth in an offcurve point.") + attrs["smooth"] = "yes" + # name + if name is not None: + attrs["name"] = name + # identifier + if identifier is not None and self.formatVersion.major >= 2: + if self.validate: + if identifier in self.identifiers: + raise GlifLibError( + "identifier used more than once: %s" % identifier + ) + if not identifierValidator(identifier): + raise GlifLibError( + "identifier not formatted properly: %s" % identifier + ) + attrs["identifier"] = identifier + self.identifiers.add(identifier) + etree.SubElement(self.contour, "point", attrs) + + def addComponent(self, glyphName, transformation, identifier=None, **kwargs): + attrs = OrderedDict([("base", glyphName)]) + for (attr, default), value in zip(_transformationInfo, transformation): + if self.validate and not isinstance(value, numberTypes): + raise GlifLibError("transformation values must be int or float") + if value != default: + attrs[attr] = repr(value) + if identifier is not None and self.formatVersion.major >= 2: + if self.validate: + if identifier in self.identifiers: + raise GlifLibError( + "identifier used more than once: %s" % identifier + ) + if self.validate and not identifierValidator(identifier): + raise GlifLibError( + "identifier not formatted properly: %s" % identifier + ) + attrs["identifier"] = identifier + self.identifiers.add(identifier) + etree.SubElement(self.outline, "component", attrs) + + +if __name__ == "__main__": + import doctest + + doctest.testmod() diff --git 
a/evalkit_tf437/lib/python3.10/site-packages/fontTools/ufoLib/plistlib.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/ufoLib/plistlib.py new file mode 100644 index 0000000000000000000000000000000000000000..38bb266b21ddf329c2bf6dc90efcc95e17b5fffc --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/ufoLib/plistlib.py @@ -0,0 +1,47 @@ +"""DEPRECATED - This module is kept here only as a backward compatibility shim +for the old ufoLib.plistlib module, which was moved to fontTools.misc.plistlib. +Please use the latter instead. +""" + +from fontTools.misc.plistlib import dump, dumps, load, loads +from fontTools.misc.textTools import tobytes + +# The following functions were part of the old py2-like ufoLib.plistlib API. +# They are kept only for backward compatiblity. +from fontTools.ufoLib.utils import deprecated + + +@deprecated("Use 'fontTools.misc.plistlib.load' instead") +def readPlist(path_or_file): + did_open = False + if isinstance(path_or_file, str): + path_or_file = open(path_or_file, "rb") + did_open = True + try: + return load(path_or_file, use_builtin_types=False) + finally: + if did_open: + path_or_file.close() + + +@deprecated("Use 'fontTools.misc.plistlib.dump' instead") +def writePlist(value, path_or_file): + did_open = False + if isinstance(path_or_file, str): + path_or_file = open(path_or_file, "wb") + did_open = True + try: + dump(value, path_or_file, use_builtin_types=False) + finally: + if did_open: + path_or_file.close() + + +@deprecated("Use 'fontTools.misc.plistlib.loads' instead") +def readPlistFromString(data): + return loads(tobytes(data, encoding="utf-8"), use_builtin_types=False) + + +@deprecated("Use 'fontTools.misc.plistlib.dumps' instead") +def writePlistToString(value): + return dumps(value, use_builtin_types=False) diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/ufoLib/utils.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/ufoLib/utils.py new file mode 100644 index 
0000000000000000000000000000000000000000..45ec1c564b71712311564fbace72eaa3d2f2c085 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/ufoLib/utils.py @@ -0,0 +1,76 @@ +"""The module contains miscellaneous helpers. +It's not considered part of the public ufoLib API. +""" + +import warnings +import functools + + +numberTypes = (int, float) + + +def deprecated(msg=""): + """Decorator factory to mark functions as deprecated with given message. + + >>> @deprecated("Enough!") + ... def some_function(): + ... "I just print 'hello world'." + ... print("hello world") + >>> some_function() + hello world + >>> some_function.__doc__ == "I just print 'hello world'." + True + """ + + def deprecated_decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + warnings.warn( + f"{func.__name__} function is a deprecated. {msg}", + category=DeprecationWarning, + stacklevel=2, + ) + return func(*args, **kwargs) + + return wrapper + + return deprecated_decorator + + +# To be mixed with enum.Enum in UFOFormatVersion and GLIFFormatVersion +class _VersionTupleEnumMixin: + @property + def major(self): + return self.value[0] + + @property + def minor(self): + return self.value[1] + + @classmethod + def _missing_(cls, value): + # allow to initialize a version enum from a single (major) integer + if isinstance(value, int): + return cls((value, 0)) + # or from None to obtain the current default version + if value is None: + return cls.default() + return super()._missing_(value) + + def __str__(self): + return f"{self.major}.{self.minor}" + + @classmethod + def default(cls): + # get the latest defined version (i.e. 
the max of all versions) + return max(cls.__members__.values()) + + @classmethod + def supported_versions(cls): + return frozenset(cls.__members__.values()) + + +if __name__ == "__main__": + import doctest + + doctest.testmod() diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/unicodedata/Scripts.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/unicodedata/Scripts.py new file mode 100644 index 0000000000000000000000000000000000000000..663998badeb98c95527dc21d95e0e433c134ae2a --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/unicodedata/Scripts.py @@ -0,0 +1,3618 @@ +# -*- coding: utf-8 -*- +# +# NOTE: This file was auto-generated with MetaTools/buildUCD.py. +# Source: https://unicode.org/Public/UNIDATA/Scripts.txt +# License: http://unicode.org/copyright.html#License +# +# Scripts-16.0.0.txt +# Date: 2024-04-30, 21:48:40 GMT +# © 2024 Unicode®, Inc. +# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. +# For terms of use and license, see https://www.unicode.org/terms_of_use.html +# +# Unicode Character Database +# For documentation, see https://www.unicode.org/reports/tr44/ +# For more information, see: +# UAX #24, Unicode Script Property: https://www.unicode.org/reports/tr24/ +# Especially the sections: +# https://www.unicode.org/reports/tr24/#Assignment_Script_Values +# https://www.unicode.org/reports/tr24/#Assignment_ScriptX_Values +# + + +RANGES = [ + 0x0000, # .. 0x0040 ; Common + 0x0041, # .. 0x005A ; Latin + 0x005B, # .. 0x0060 ; Common + 0x0061, # .. 0x007A ; Latin + 0x007B, # .. 0x00A9 ; Common + 0x00AA, # .. 0x00AA ; Latin + 0x00AB, # .. 0x00B9 ; Common + 0x00BA, # .. 0x00BA ; Latin + 0x00BB, # .. 0x00BF ; Common + 0x00C0, # .. 0x00D6 ; Latin + 0x00D7, # .. 0x00D7 ; Common + 0x00D8, # .. 0x00F6 ; Latin + 0x00F7, # .. 0x00F7 ; Common + 0x00F8, # .. 0x02B8 ; Latin + 0x02B9, # .. 0x02DF ; Common + 0x02E0, # .. 0x02E4 ; Latin + 0x02E5, # .. 
0x02E9 ; Common + 0x02EA, # .. 0x02EB ; Bopomofo + 0x02EC, # .. 0x02FF ; Common + 0x0300, # .. 0x036F ; Inherited + 0x0370, # .. 0x0373 ; Greek + 0x0374, # .. 0x0374 ; Common + 0x0375, # .. 0x0377 ; Greek + 0x0378, # .. 0x0379 ; Unknown + 0x037A, # .. 0x037D ; Greek + 0x037E, # .. 0x037E ; Common + 0x037F, # .. 0x037F ; Greek + 0x0380, # .. 0x0383 ; Unknown + 0x0384, # .. 0x0384 ; Greek + 0x0385, # .. 0x0385 ; Common + 0x0386, # .. 0x0386 ; Greek + 0x0387, # .. 0x0387 ; Common + 0x0388, # .. 0x038A ; Greek + 0x038B, # .. 0x038B ; Unknown + 0x038C, # .. 0x038C ; Greek + 0x038D, # .. 0x038D ; Unknown + 0x038E, # .. 0x03A1 ; Greek + 0x03A2, # .. 0x03A2 ; Unknown + 0x03A3, # .. 0x03E1 ; Greek + 0x03E2, # .. 0x03EF ; Coptic + 0x03F0, # .. 0x03FF ; Greek + 0x0400, # .. 0x0484 ; Cyrillic + 0x0485, # .. 0x0486 ; Inherited + 0x0487, # .. 0x052F ; Cyrillic + 0x0530, # .. 0x0530 ; Unknown + 0x0531, # .. 0x0556 ; Armenian + 0x0557, # .. 0x0558 ; Unknown + 0x0559, # .. 0x058A ; Armenian + 0x058B, # .. 0x058C ; Unknown + 0x058D, # .. 0x058F ; Armenian + 0x0590, # .. 0x0590 ; Unknown + 0x0591, # .. 0x05C7 ; Hebrew + 0x05C8, # .. 0x05CF ; Unknown + 0x05D0, # .. 0x05EA ; Hebrew + 0x05EB, # .. 0x05EE ; Unknown + 0x05EF, # .. 0x05F4 ; Hebrew + 0x05F5, # .. 0x05FF ; Unknown + 0x0600, # .. 0x0604 ; Arabic + 0x0605, # .. 0x0605 ; Common + 0x0606, # .. 0x060B ; Arabic + 0x060C, # .. 0x060C ; Common + 0x060D, # .. 0x061A ; Arabic + 0x061B, # .. 0x061B ; Common + 0x061C, # .. 0x061E ; Arabic + 0x061F, # .. 0x061F ; Common + 0x0620, # .. 0x063F ; Arabic + 0x0640, # .. 0x0640 ; Common + 0x0641, # .. 0x064A ; Arabic + 0x064B, # .. 0x0655 ; Inherited + 0x0656, # .. 0x066F ; Arabic + 0x0670, # .. 0x0670 ; Inherited + 0x0671, # .. 0x06DC ; Arabic + 0x06DD, # .. 0x06DD ; Common + 0x06DE, # .. 0x06FF ; Arabic + 0x0700, # .. 0x070D ; Syriac + 0x070E, # .. 0x070E ; Unknown + 0x070F, # .. 0x074A ; Syriac + 0x074B, # .. 0x074C ; Unknown + 0x074D, # .. 0x074F ; Syriac + 0x0750, # .. 
0x077F ; Arabic + 0x0780, # .. 0x07B1 ; Thaana + 0x07B2, # .. 0x07BF ; Unknown + 0x07C0, # .. 0x07FA ; Nko + 0x07FB, # .. 0x07FC ; Unknown + 0x07FD, # .. 0x07FF ; Nko + 0x0800, # .. 0x082D ; Samaritan + 0x082E, # .. 0x082F ; Unknown + 0x0830, # .. 0x083E ; Samaritan + 0x083F, # .. 0x083F ; Unknown + 0x0840, # .. 0x085B ; Mandaic + 0x085C, # .. 0x085D ; Unknown + 0x085E, # .. 0x085E ; Mandaic + 0x085F, # .. 0x085F ; Unknown + 0x0860, # .. 0x086A ; Syriac + 0x086B, # .. 0x086F ; Unknown + 0x0870, # .. 0x088E ; Arabic + 0x088F, # .. 0x088F ; Unknown + 0x0890, # .. 0x0891 ; Arabic + 0x0892, # .. 0x0896 ; Unknown + 0x0897, # .. 0x08E1 ; Arabic + 0x08E2, # .. 0x08E2 ; Common + 0x08E3, # .. 0x08FF ; Arabic + 0x0900, # .. 0x0950 ; Devanagari + 0x0951, # .. 0x0954 ; Inherited + 0x0955, # .. 0x0963 ; Devanagari + 0x0964, # .. 0x0965 ; Common + 0x0966, # .. 0x097F ; Devanagari + 0x0980, # .. 0x0983 ; Bengali + 0x0984, # .. 0x0984 ; Unknown + 0x0985, # .. 0x098C ; Bengali + 0x098D, # .. 0x098E ; Unknown + 0x098F, # .. 0x0990 ; Bengali + 0x0991, # .. 0x0992 ; Unknown + 0x0993, # .. 0x09A8 ; Bengali + 0x09A9, # .. 0x09A9 ; Unknown + 0x09AA, # .. 0x09B0 ; Bengali + 0x09B1, # .. 0x09B1 ; Unknown + 0x09B2, # .. 0x09B2 ; Bengali + 0x09B3, # .. 0x09B5 ; Unknown + 0x09B6, # .. 0x09B9 ; Bengali + 0x09BA, # .. 0x09BB ; Unknown + 0x09BC, # .. 0x09C4 ; Bengali + 0x09C5, # .. 0x09C6 ; Unknown + 0x09C7, # .. 0x09C8 ; Bengali + 0x09C9, # .. 0x09CA ; Unknown + 0x09CB, # .. 0x09CE ; Bengali + 0x09CF, # .. 0x09D6 ; Unknown + 0x09D7, # .. 0x09D7 ; Bengali + 0x09D8, # .. 0x09DB ; Unknown + 0x09DC, # .. 0x09DD ; Bengali + 0x09DE, # .. 0x09DE ; Unknown + 0x09DF, # .. 0x09E3 ; Bengali + 0x09E4, # .. 0x09E5 ; Unknown + 0x09E6, # .. 0x09FE ; Bengali + 0x09FF, # .. 0x0A00 ; Unknown + 0x0A01, # .. 0x0A03 ; Gurmukhi + 0x0A04, # .. 0x0A04 ; Unknown + 0x0A05, # .. 0x0A0A ; Gurmukhi + 0x0A0B, # .. 0x0A0E ; Unknown + 0x0A0F, # .. 0x0A10 ; Gurmukhi + 0x0A11, # .. 0x0A12 ; Unknown + 0x0A13, # .. 
0x0A28 ; Gurmukhi + 0x0A29, # .. 0x0A29 ; Unknown + 0x0A2A, # .. 0x0A30 ; Gurmukhi + 0x0A31, # .. 0x0A31 ; Unknown + 0x0A32, # .. 0x0A33 ; Gurmukhi + 0x0A34, # .. 0x0A34 ; Unknown + 0x0A35, # .. 0x0A36 ; Gurmukhi + 0x0A37, # .. 0x0A37 ; Unknown + 0x0A38, # .. 0x0A39 ; Gurmukhi + 0x0A3A, # .. 0x0A3B ; Unknown + 0x0A3C, # .. 0x0A3C ; Gurmukhi + 0x0A3D, # .. 0x0A3D ; Unknown + 0x0A3E, # .. 0x0A42 ; Gurmukhi + 0x0A43, # .. 0x0A46 ; Unknown + 0x0A47, # .. 0x0A48 ; Gurmukhi + 0x0A49, # .. 0x0A4A ; Unknown + 0x0A4B, # .. 0x0A4D ; Gurmukhi + 0x0A4E, # .. 0x0A50 ; Unknown + 0x0A51, # .. 0x0A51 ; Gurmukhi + 0x0A52, # .. 0x0A58 ; Unknown + 0x0A59, # .. 0x0A5C ; Gurmukhi + 0x0A5D, # .. 0x0A5D ; Unknown + 0x0A5E, # .. 0x0A5E ; Gurmukhi + 0x0A5F, # .. 0x0A65 ; Unknown + 0x0A66, # .. 0x0A76 ; Gurmukhi + 0x0A77, # .. 0x0A80 ; Unknown + 0x0A81, # .. 0x0A83 ; Gujarati + 0x0A84, # .. 0x0A84 ; Unknown + 0x0A85, # .. 0x0A8D ; Gujarati + 0x0A8E, # .. 0x0A8E ; Unknown + 0x0A8F, # .. 0x0A91 ; Gujarati + 0x0A92, # .. 0x0A92 ; Unknown + 0x0A93, # .. 0x0AA8 ; Gujarati + 0x0AA9, # .. 0x0AA9 ; Unknown + 0x0AAA, # .. 0x0AB0 ; Gujarati + 0x0AB1, # .. 0x0AB1 ; Unknown + 0x0AB2, # .. 0x0AB3 ; Gujarati + 0x0AB4, # .. 0x0AB4 ; Unknown + 0x0AB5, # .. 0x0AB9 ; Gujarati + 0x0ABA, # .. 0x0ABB ; Unknown + 0x0ABC, # .. 0x0AC5 ; Gujarati + 0x0AC6, # .. 0x0AC6 ; Unknown + 0x0AC7, # .. 0x0AC9 ; Gujarati + 0x0ACA, # .. 0x0ACA ; Unknown + 0x0ACB, # .. 0x0ACD ; Gujarati + 0x0ACE, # .. 0x0ACF ; Unknown + 0x0AD0, # .. 0x0AD0 ; Gujarati + 0x0AD1, # .. 0x0ADF ; Unknown + 0x0AE0, # .. 0x0AE3 ; Gujarati + 0x0AE4, # .. 0x0AE5 ; Unknown + 0x0AE6, # .. 0x0AF1 ; Gujarati + 0x0AF2, # .. 0x0AF8 ; Unknown + 0x0AF9, # .. 0x0AFF ; Gujarati + 0x0B00, # .. 0x0B00 ; Unknown + 0x0B01, # .. 0x0B03 ; Oriya + 0x0B04, # .. 0x0B04 ; Unknown + 0x0B05, # .. 0x0B0C ; Oriya + 0x0B0D, # .. 0x0B0E ; Unknown + 0x0B0F, # .. 0x0B10 ; Oriya + 0x0B11, # .. 0x0B12 ; Unknown + 0x0B13, # .. 0x0B28 ; Oriya + 0x0B29, # .. 
0x0B29 ; Unknown + 0x0B2A, # .. 0x0B30 ; Oriya + 0x0B31, # .. 0x0B31 ; Unknown + 0x0B32, # .. 0x0B33 ; Oriya + 0x0B34, # .. 0x0B34 ; Unknown + 0x0B35, # .. 0x0B39 ; Oriya + 0x0B3A, # .. 0x0B3B ; Unknown + 0x0B3C, # .. 0x0B44 ; Oriya + 0x0B45, # .. 0x0B46 ; Unknown + 0x0B47, # .. 0x0B48 ; Oriya + 0x0B49, # .. 0x0B4A ; Unknown + 0x0B4B, # .. 0x0B4D ; Oriya + 0x0B4E, # .. 0x0B54 ; Unknown + 0x0B55, # .. 0x0B57 ; Oriya + 0x0B58, # .. 0x0B5B ; Unknown + 0x0B5C, # .. 0x0B5D ; Oriya + 0x0B5E, # .. 0x0B5E ; Unknown + 0x0B5F, # .. 0x0B63 ; Oriya + 0x0B64, # .. 0x0B65 ; Unknown + 0x0B66, # .. 0x0B77 ; Oriya + 0x0B78, # .. 0x0B81 ; Unknown + 0x0B82, # .. 0x0B83 ; Tamil + 0x0B84, # .. 0x0B84 ; Unknown + 0x0B85, # .. 0x0B8A ; Tamil + 0x0B8B, # .. 0x0B8D ; Unknown + 0x0B8E, # .. 0x0B90 ; Tamil + 0x0B91, # .. 0x0B91 ; Unknown + 0x0B92, # .. 0x0B95 ; Tamil + 0x0B96, # .. 0x0B98 ; Unknown + 0x0B99, # .. 0x0B9A ; Tamil + 0x0B9B, # .. 0x0B9B ; Unknown + 0x0B9C, # .. 0x0B9C ; Tamil + 0x0B9D, # .. 0x0B9D ; Unknown + 0x0B9E, # .. 0x0B9F ; Tamil + 0x0BA0, # .. 0x0BA2 ; Unknown + 0x0BA3, # .. 0x0BA4 ; Tamil + 0x0BA5, # .. 0x0BA7 ; Unknown + 0x0BA8, # .. 0x0BAA ; Tamil + 0x0BAB, # .. 0x0BAD ; Unknown + 0x0BAE, # .. 0x0BB9 ; Tamil + 0x0BBA, # .. 0x0BBD ; Unknown + 0x0BBE, # .. 0x0BC2 ; Tamil + 0x0BC3, # .. 0x0BC5 ; Unknown + 0x0BC6, # .. 0x0BC8 ; Tamil + 0x0BC9, # .. 0x0BC9 ; Unknown + 0x0BCA, # .. 0x0BCD ; Tamil + 0x0BCE, # .. 0x0BCF ; Unknown + 0x0BD0, # .. 0x0BD0 ; Tamil + 0x0BD1, # .. 0x0BD6 ; Unknown + 0x0BD7, # .. 0x0BD7 ; Tamil + 0x0BD8, # .. 0x0BE5 ; Unknown + 0x0BE6, # .. 0x0BFA ; Tamil + 0x0BFB, # .. 0x0BFF ; Unknown + 0x0C00, # .. 0x0C0C ; Telugu + 0x0C0D, # .. 0x0C0D ; Unknown + 0x0C0E, # .. 0x0C10 ; Telugu + 0x0C11, # .. 0x0C11 ; Unknown + 0x0C12, # .. 0x0C28 ; Telugu + 0x0C29, # .. 0x0C29 ; Unknown + 0x0C2A, # .. 0x0C39 ; Telugu + 0x0C3A, # .. 0x0C3B ; Unknown + 0x0C3C, # .. 0x0C44 ; Telugu + 0x0C45, # .. 0x0C45 ; Unknown + 0x0C46, # .. 0x0C48 ; Telugu + 0x0C49, # .. 
0x0C49 ; Unknown + 0x0C4A, # .. 0x0C4D ; Telugu + 0x0C4E, # .. 0x0C54 ; Unknown + 0x0C55, # .. 0x0C56 ; Telugu + 0x0C57, # .. 0x0C57 ; Unknown + 0x0C58, # .. 0x0C5A ; Telugu + 0x0C5B, # .. 0x0C5C ; Unknown + 0x0C5D, # .. 0x0C5D ; Telugu + 0x0C5E, # .. 0x0C5F ; Unknown + 0x0C60, # .. 0x0C63 ; Telugu + 0x0C64, # .. 0x0C65 ; Unknown + 0x0C66, # .. 0x0C6F ; Telugu + 0x0C70, # .. 0x0C76 ; Unknown + 0x0C77, # .. 0x0C7F ; Telugu + 0x0C80, # .. 0x0C8C ; Kannada + 0x0C8D, # .. 0x0C8D ; Unknown + 0x0C8E, # .. 0x0C90 ; Kannada + 0x0C91, # .. 0x0C91 ; Unknown + 0x0C92, # .. 0x0CA8 ; Kannada + 0x0CA9, # .. 0x0CA9 ; Unknown + 0x0CAA, # .. 0x0CB3 ; Kannada + 0x0CB4, # .. 0x0CB4 ; Unknown + 0x0CB5, # .. 0x0CB9 ; Kannada + 0x0CBA, # .. 0x0CBB ; Unknown + 0x0CBC, # .. 0x0CC4 ; Kannada + 0x0CC5, # .. 0x0CC5 ; Unknown + 0x0CC6, # .. 0x0CC8 ; Kannada + 0x0CC9, # .. 0x0CC9 ; Unknown + 0x0CCA, # .. 0x0CCD ; Kannada + 0x0CCE, # .. 0x0CD4 ; Unknown + 0x0CD5, # .. 0x0CD6 ; Kannada + 0x0CD7, # .. 0x0CDC ; Unknown + 0x0CDD, # .. 0x0CDE ; Kannada + 0x0CDF, # .. 0x0CDF ; Unknown + 0x0CE0, # .. 0x0CE3 ; Kannada + 0x0CE4, # .. 0x0CE5 ; Unknown + 0x0CE6, # .. 0x0CEF ; Kannada + 0x0CF0, # .. 0x0CF0 ; Unknown + 0x0CF1, # .. 0x0CF3 ; Kannada + 0x0CF4, # .. 0x0CFF ; Unknown + 0x0D00, # .. 0x0D0C ; Malayalam + 0x0D0D, # .. 0x0D0D ; Unknown + 0x0D0E, # .. 0x0D10 ; Malayalam + 0x0D11, # .. 0x0D11 ; Unknown + 0x0D12, # .. 0x0D44 ; Malayalam + 0x0D45, # .. 0x0D45 ; Unknown + 0x0D46, # .. 0x0D48 ; Malayalam + 0x0D49, # .. 0x0D49 ; Unknown + 0x0D4A, # .. 0x0D4F ; Malayalam + 0x0D50, # .. 0x0D53 ; Unknown + 0x0D54, # .. 0x0D63 ; Malayalam + 0x0D64, # .. 0x0D65 ; Unknown + 0x0D66, # .. 0x0D7F ; Malayalam + 0x0D80, # .. 0x0D80 ; Unknown + 0x0D81, # .. 0x0D83 ; Sinhala + 0x0D84, # .. 0x0D84 ; Unknown + 0x0D85, # .. 0x0D96 ; Sinhala + 0x0D97, # .. 0x0D99 ; Unknown + 0x0D9A, # .. 0x0DB1 ; Sinhala + 0x0DB2, # .. 0x0DB2 ; Unknown + 0x0DB3, # .. 0x0DBB ; Sinhala + 0x0DBC, # .. 0x0DBC ; Unknown + 0x0DBD, # .. 
0x0DBD ; Sinhala + 0x0DBE, # .. 0x0DBF ; Unknown + 0x0DC0, # .. 0x0DC6 ; Sinhala + 0x0DC7, # .. 0x0DC9 ; Unknown + 0x0DCA, # .. 0x0DCA ; Sinhala + 0x0DCB, # .. 0x0DCE ; Unknown + 0x0DCF, # .. 0x0DD4 ; Sinhala + 0x0DD5, # .. 0x0DD5 ; Unknown + 0x0DD6, # .. 0x0DD6 ; Sinhala + 0x0DD7, # .. 0x0DD7 ; Unknown + 0x0DD8, # .. 0x0DDF ; Sinhala + 0x0DE0, # .. 0x0DE5 ; Unknown + 0x0DE6, # .. 0x0DEF ; Sinhala + 0x0DF0, # .. 0x0DF1 ; Unknown + 0x0DF2, # .. 0x0DF4 ; Sinhala + 0x0DF5, # .. 0x0E00 ; Unknown + 0x0E01, # .. 0x0E3A ; Thai + 0x0E3B, # .. 0x0E3E ; Unknown + 0x0E3F, # .. 0x0E3F ; Common + 0x0E40, # .. 0x0E5B ; Thai + 0x0E5C, # .. 0x0E80 ; Unknown + 0x0E81, # .. 0x0E82 ; Lao + 0x0E83, # .. 0x0E83 ; Unknown + 0x0E84, # .. 0x0E84 ; Lao + 0x0E85, # .. 0x0E85 ; Unknown + 0x0E86, # .. 0x0E8A ; Lao + 0x0E8B, # .. 0x0E8B ; Unknown + 0x0E8C, # .. 0x0EA3 ; Lao + 0x0EA4, # .. 0x0EA4 ; Unknown + 0x0EA5, # .. 0x0EA5 ; Lao + 0x0EA6, # .. 0x0EA6 ; Unknown + 0x0EA7, # .. 0x0EBD ; Lao + 0x0EBE, # .. 0x0EBF ; Unknown + 0x0EC0, # .. 0x0EC4 ; Lao + 0x0EC5, # .. 0x0EC5 ; Unknown + 0x0EC6, # .. 0x0EC6 ; Lao + 0x0EC7, # .. 0x0EC7 ; Unknown + 0x0EC8, # .. 0x0ECE ; Lao + 0x0ECF, # .. 0x0ECF ; Unknown + 0x0ED0, # .. 0x0ED9 ; Lao + 0x0EDA, # .. 0x0EDB ; Unknown + 0x0EDC, # .. 0x0EDF ; Lao + 0x0EE0, # .. 0x0EFF ; Unknown + 0x0F00, # .. 0x0F47 ; Tibetan + 0x0F48, # .. 0x0F48 ; Unknown + 0x0F49, # .. 0x0F6C ; Tibetan + 0x0F6D, # .. 0x0F70 ; Unknown + 0x0F71, # .. 0x0F97 ; Tibetan + 0x0F98, # .. 0x0F98 ; Unknown + 0x0F99, # .. 0x0FBC ; Tibetan + 0x0FBD, # .. 0x0FBD ; Unknown + 0x0FBE, # .. 0x0FCC ; Tibetan + 0x0FCD, # .. 0x0FCD ; Unknown + 0x0FCE, # .. 0x0FD4 ; Tibetan + 0x0FD5, # .. 0x0FD8 ; Common + 0x0FD9, # .. 0x0FDA ; Tibetan + 0x0FDB, # .. 0x0FFF ; Unknown + 0x1000, # .. 0x109F ; Myanmar + 0x10A0, # .. 0x10C5 ; Georgian + 0x10C6, # .. 0x10C6 ; Unknown + 0x10C7, # .. 0x10C7 ; Georgian + 0x10C8, # .. 0x10CC ; Unknown + 0x10CD, # .. 0x10CD ; Georgian + 0x10CE, # .. 0x10CF ; Unknown + 0x10D0, # .. 
0x10FA ; Georgian + 0x10FB, # .. 0x10FB ; Common + 0x10FC, # .. 0x10FF ; Georgian + 0x1100, # .. 0x11FF ; Hangul + 0x1200, # .. 0x1248 ; Ethiopic + 0x1249, # .. 0x1249 ; Unknown + 0x124A, # .. 0x124D ; Ethiopic + 0x124E, # .. 0x124F ; Unknown + 0x1250, # .. 0x1256 ; Ethiopic + 0x1257, # .. 0x1257 ; Unknown + 0x1258, # .. 0x1258 ; Ethiopic + 0x1259, # .. 0x1259 ; Unknown + 0x125A, # .. 0x125D ; Ethiopic + 0x125E, # .. 0x125F ; Unknown + 0x1260, # .. 0x1288 ; Ethiopic + 0x1289, # .. 0x1289 ; Unknown + 0x128A, # .. 0x128D ; Ethiopic + 0x128E, # .. 0x128F ; Unknown + 0x1290, # .. 0x12B0 ; Ethiopic + 0x12B1, # .. 0x12B1 ; Unknown + 0x12B2, # .. 0x12B5 ; Ethiopic + 0x12B6, # .. 0x12B7 ; Unknown + 0x12B8, # .. 0x12BE ; Ethiopic + 0x12BF, # .. 0x12BF ; Unknown + 0x12C0, # .. 0x12C0 ; Ethiopic + 0x12C1, # .. 0x12C1 ; Unknown + 0x12C2, # .. 0x12C5 ; Ethiopic + 0x12C6, # .. 0x12C7 ; Unknown + 0x12C8, # .. 0x12D6 ; Ethiopic + 0x12D7, # .. 0x12D7 ; Unknown + 0x12D8, # .. 0x1310 ; Ethiopic + 0x1311, # .. 0x1311 ; Unknown + 0x1312, # .. 0x1315 ; Ethiopic + 0x1316, # .. 0x1317 ; Unknown + 0x1318, # .. 0x135A ; Ethiopic + 0x135B, # .. 0x135C ; Unknown + 0x135D, # .. 0x137C ; Ethiopic + 0x137D, # .. 0x137F ; Unknown + 0x1380, # .. 0x1399 ; Ethiopic + 0x139A, # .. 0x139F ; Unknown + 0x13A0, # .. 0x13F5 ; Cherokee + 0x13F6, # .. 0x13F7 ; Unknown + 0x13F8, # .. 0x13FD ; Cherokee + 0x13FE, # .. 0x13FF ; Unknown + 0x1400, # .. 0x167F ; Canadian_Aboriginal + 0x1680, # .. 0x169C ; Ogham + 0x169D, # .. 0x169F ; Unknown + 0x16A0, # .. 0x16EA ; Runic + 0x16EB, # .. 0x16ED ; Common + 0x16EE, # .. 0x16F8 ; Runic + 0x16F9, # .. 0x16FF ; Unknown + 0x1700, # .. 0x1715 ; Tagalog + 0x1716, # .. 0x171E ; Unknown + 0x171F, # .. 0x171F ; Tagalog + 0x1720, # .. 0x1734 ; Hanunoo + 0x1735, # .. 0x1736 ; Common + 0x1737, # .. 0x173F ; Unknown + 0x1740, # .. 0x1753 ; Buhid + 0x1754, # .. 0x175F ; Unknown + 0x1760, # .. 0x176C ; Tagbanwa + 0x176D, # .. 0x176D ; Unknown + 0x176E, # .. 
0x1770 ; Tagbanwa + 0x1771, # .. 0x1771 ; Unknown + 0x1772, # .. 0x1773 ; Tagbanwa + 0x1774, # .. 0x177F ; Unknown + 0x1780, # .. 0x17DD ; Khmer + 0x17DE, # .. 0x17DF ; Unknown + 0x17E0, # .. 0x17E9 ; Khmer + 0x17EA, # .. 0x17EF ; Unknown + 0x17F0, # .. 0x17F9 ; Khmer + 0x17FA, # .. 0x17FF ; Unknown + 0x1800, # .. 0x1801 ; Mongolian + 0x1802, # .. 0x1803 ; Common + 0x1804, # .. 0x1804 ; Mongolian + 0x1805, # .. 0x1805 ; Common + 0x1806, # .. 0x1819 ; Mongolian + 0x181A, # .. 0x181F ; Unknown + 0x1820, # .. 0x1878 ; Mongolian + 0x1879, # .. 0x187F ; Unknown + 0x1880, # .. 0x18AA ; Mongolian + 0x18AB, # .. 0x18AF ; Unknown + 0x18B0, # .. 0x18F5 ; Canadian_Aboriginal + 0x18F6, # .. 0x18FF ; Unknown + 0x1900, # .. 0x191E ; Limbu + 0x191F, # .. 0x191F ; Unknown + 0x1920, # .. 0x192B ; Limbu + 0x192C, # .. 0x192F ; Unknown + 0x1930, # .. 0x193B ; Limbu + 0x193C, # .. 0x193F ; Unknown + 0x1940, # .. 0x1940 ; Limbu + 0x1941, # .. 0x1943 ; Unknown + 0x1944, # .. 0x194F ; Limbu + 0x1950, # .. 0x196D ; Tai_Le + 0x196E, # .. 0x196F ; Unknown + 0x1970, # .. 0x1974 ; Tai_Le + 0x1975, # .. 0x197F ; Unknown + 0x1980, # .. 0x19AB ; New_Tai_Lue + 0x19AC, # .. 0x19AF ; Unknown + 0x19B0, # .. 0x19C9 ; New_Tai_Lue + 0x19CA, # .. 0x19CF ; Unknown + 0x19D0, # .. 0x19DA ; New_Tai_Lue + 0x19DB, # .. 0x19DD ; Unknown + 0x19DE, # .. 0x19DF ; New_Tai_Lue + 0x19E0, # .. 0x19FF ; Khmer + 0x1A00, # .. 0x1A1B ; Buginese + 0x1A1C, # .. 0x1A1D ; Unknown + 0x1A1E, # .. 0x1A1F ; Buginese + 0x1A20, # .. 0x1A5E ; Tai_Tham + 0x1A5F, # .. 0x1A5F ; Unknown + 0x1A60, # .. 0x1A7C ; Tai_Tham + 0x1A7D, # .. 0x1A7E ; Unknown + 0x1A7F, # .. 0x1A89 ; Tai_Tham + 0x1A8A, # .. 0x1A8F ; Unknown + 0x1A90, # .. 0x1A99 ; Tai_Tham + 0x1A9A, # .. 0x1A9F ; Unknown + 0x1AA0, # .. 0x1AAD ; Tai_Tham + 0x1AAE, # .. 0x1AAF ; Unknown + 0x1AB0, # .. 0x1ACE ; Inherited + 0x1ACF, # .. 0x1AFF ; Unknown + 0x1B00, # .. 0x1B4C ; Balinese + 0x1B4D, # .. 0x1B4D ; Unknown + 0x1B4E, # .. 0x1B7F ; Balinese + 0x1B80, # .. 
0x1BBF ; Sundanese + 0x1BC0, # .. 0x1BF3 ; Batak + 0x1BF4, # .. 0x1BFB ; Unknown + 0x1BFC, # .. 0x1BFF ; Batak + 0x1C00, # .. 0x1C37 ; Lepcha + 0x1C38, # .. 0x1C3A ; Unknown + 0x1C3B, # .. 0x1C49 ; Lepcha + 0x1C4A, # .. 0x1C4C ; Unknown + 0x1C4D, # .. 0x1C4F ; Lepcha + 0x1C50, # .. 0x1C7F ; Ol_Chiki + 0x1C80, # .. 0x1C8A ; Cyrillic + 0x1C8B, # .. 0x1C8F ; Unknown + 0x1C90, # .. 0x1CBA ; Georgian + 0x1CBB, # .. 0x1CBC ; Unknown + 0x1CBD, # .. 0x1CBF ; Georgian + 0x1CC0, # .. 0x1CC7 ; Sundanese + 0x1CC8, # .. 0x1CCF ; Unknown + 0x1CD0, # .. 0x1CD2 ; Inherited + 0x1CD3, # .. 0x1CD3 ; Common + 0x1CD4, # .. 0x1CE0 ; Inherited + 0x1CE1, # .. 0x1CE1 ; Common + 0x1CE2, # .. 0x1CE8 ; Inherited + 0x1CE9, # .. 0x1CEC ; Common + 0x1CED, # .. 0x1CED ; Inherited + 0x1CEE, # .. 0x1CF3 ; Common + 0x1CF4, # .. 0x1CF4 ; Inherited + 0x1CF5, # .. 0x1CF7 ; Common + 0x1CF8, # .. 0x1CF9 ; Inherited + 0x1CFA, # .. 0x1CFA ; Common + 0x1CFB, # .. 0x1CFF ; Unknown + 0x1D00, # .. 0x1D25 ; Latin + 0x1D26, # .. 0x1D2A ; Greek + 0x1D2B, # .. 0x1D2B ; Cyrillic + 0x1D2C, # .. 0x1D5C ; Latin + 0x1D5D, # .. 0x1D61 ; Greek + 0x1D62, # .. 0x1D65 ; Latin + 0x1D66, # .. 0x1D6A ; Greek + 0x1D6B, # .. 0x1D77 ; Latin + 0x1D78, # .. 0x1D78 ; Cyrillic + 0x1D79, # .. 0x1DBE ; Latin + 0x1DBF, # .. 0x1DBF ; Greek + 0x1DC0, # .. 0x1DFF ; Inherited + 0x1E00, # .. 0x1EFF ; Latin + 0x1F00, # .. 0x1F15 ; Greek + 0x1F16, # .. 0x1F17 ; Unknown + 0x1F18, # .. 0x1F1D ; Greek + 0x1F1E, # .. 0x1F1F ; Unknown + 0x1F20, # .. 0x1F45 ; Greek + 0x1F46, # .. 0x1F47 ; Unknown + 0x1F48, # .. 0x1F4D ; Greek + 0x1F4E, # .. 0x1F4F ; Unknown + 0x1F50, # .. 0x1F57 ; Greek + 0x1F58, # .. 0x1F58 ; Unknown + 0x1F59, # .. 0x1F59 ; Greek + 0x1F5A, # .. 0x1F5A ; Unknown + 0x1F5B, # .. 0x1F5B ; Greek + 0x1F5C, # .. 0x1F5C ; Unknown + 0x1F5D, # .. 0x1F5D ; Greek + 0x1F5E, # .. 0x1F5E ; Unknown + 0x1F5F, # .. 0x1F7D ; Greek + 0x1F7E, # .. 0x1F7F ; Unknown + 0x1F80, # .. 0x1FB4 ; Greek + 0x1FB5, # .. 0x1FB5 ; Unknown + 0x1FB6, # .. 
0x1FC4 ; Greek + 0x1FC5, # .. 0x1FC5 ; Unknown + 0x1FC6, # .. 0x1FD3 ; Greek + 0x1FD4, # .. 0x1FD5 ; Unknown + 0x1FD6, # .. 0x1FDB ; Greek + 0x1FDC, # .. 0x1FDC ; Unknown + 0x1FDD, # .. 0x1FEF ; Greek + 0x1FF0, # .. 0x1FF1 ; Unknown + 0x1FF2, # .. 0x1FF4 ; Greek + 0x1FF5, # .. 0x1FF5 ; Unknown + 0x1FF6, # .. 0x1FFE ; Greek + 0x1FFF, # .. 0x1FFF ; Unknown + 0x2000, # .. 0x200B ; Common + 0x200C, # .. 0x200D ; Inherited + 0x200E, # .. 0x2064 ; Common + 0x2065, # .. 0x2065 ; Unknown + 0x2066, # .. 0x2070 ; Common + 0x2071, # .. 0x2071 ; Latin + 0x2072, # .. 0x2073 ; Unknown + 0x2074, # .. 0x207E ; Common + 0x207F, # .. 0x207F ; Latin + 0x2080, # .. 0x208E ; Common + 0x208F, # .. 0x208F ; Unknown + 0x2090, # .. 0x209C ; Latin + 0x209D, # .. 0x209F ; Unknown + 0x20A0, # .. 0x20C0 ; Common + 0x20C1, # .. 0x20CF ; Unknown + 0x20D0, # .. 0x20F0 ; Inherited + 0x20F1, # .. 0x20FF ; Unknown + 0x2100, # .. 0x2125 ; Common + 0x2126, # .. 0x2126 ; Greek + 0x2127, # .. 0x2129 ; Common + 0x212A, # .. 0x212B ; Latin + 0x212C, # .. 0x2131 ; Common + 0x2132, # .. 0x2132 ; Latin + 0x2133, # .. 0x214D ; Common + 0x214E, # .. 0x214E ; Latin + 0x214F, # .. 0x215F ; Common + 0x2160, # .. 0x2188 ; Latin + 0x2189, # .. 0x218B ; Common + 0x218C, # .. 0x218F ; Unknown + 0x2190, # .. 0x2429 ; Common + 0x242A, # .. 0x243F ; Unknown + 0x2440, # .. 0x244A ; Common + 0x244B, # .. 0x245F ; Unknown + 0x2460, # .. 0x27FF ; Common + 0x2800, # .. 0x28FF ; Braille + 0x2900, # .. 0x2B73 ; Common + 0x2B74, # .. 0x2B75 ; Unknown + 0x2B76, # .. 0x2B95 ; Common + 0x2B96, # .. 0x2B96 ; Unknown + 0x2B97, # .. 0x2BFF ; Common + 0x2C00, # .. 0x2C5F ; Glagolitic + 0x2C60, # .. 0x2C7F ; Latin + 0x2C80, # .. 0x2CF3 ; Coptic + 0x2CF4, # .. 0x2CF8 ; Unknown + 0x2CF9, # .. 0x2CFF ; Coptic + 0x2D00, # .. 0x2D25 ; Georgian + 0x2D26, # .. 0x2D26 ; Unknown + 0x2D27, # .. 0x2D27 ; Georgian + 0x2D28, # .. 0x2D2C ; Unknown + 0x2D2D, # .. 0x2D2D ; Georgian + 0x2D2E, # .. 0x2D2F ; Unknown + 0x2D30, # .. 
0x2D67 ; Tifinagh + 0x2D68, # .. 0x2D6E ; Unknown + 0x2D6F, # .. 0x2D70 ; Tifinagh + 0x2D71, # .. 0x2D7E ; Unknown + 0x2D7F, # .. 0x2D7F ; Tifinagh + 0x2D80, # .. 0x2D96 ; Ethiopic + 0x2D97, # .. 0x2D9F ; Unknown + 0x2DA0, # .. 0x2DA6 ; Ethiopic + 0x2DA7, # .. 0x2DA7 ; Unknown + 0x2DA8, # .. 0x2DAE ; Ethiopic + 0x2DAF, # .. 0x2DAF ; Unknown + 0x2DB0, # .. 0x2DB6 ; Ethiopic + 0x2DB7, # .. 0x2DB7 ; Unknown + 0x2DB8, # .. 0x2DBE ; Ethiopic + 0x2DBF, # .. 0x2DBF ; Unknown + 0x2DC0, # .. 0x2DC6 ; Ethiopic + 0x2DC7, # .. 0x2DC7 ; Unknown + 0x2DC8, # .. 0x2DCE ; Ethiopic + 0x2DCF, # .. 0x2DCF ; Unknown + 0x2DD0, # .. 0x2DD6 ; Ethiopic + 0x2DD7, # .. 0x2DD7 ; Unknown + 0x2DD8, # .. 0x2DDE ; Ethiopic + 0x2DDF, # .. 0x2DDF ; Unknown + 0x2DE0, # .. 0x2DFF ; Cyrillic + 0x2E00, # .. 0x2E5D ; Common + 0x2E5E, # .. 0x2E7F ; Unknown + 0x2E80, # .. 0x2E99 ; Han + 0x2E9A, # .. 0x2E9A ; Unknown + 0x2E9B, # .. 0x2EF3 ; Han + 0x2EF4, # .. 0x2EFF ; Unknown + 0x2F00, # .. 0x2FD5 ; Han + 0x2FD6, # .. 0x2FEF ; Unknown + 0x2FF0, # .. 0x3004 ; Common + 0x3005, # .. 0x3005 ; Han + 0x3006, # .. 0x3006 ; Common + 0x3007, # .. 0x3007 ; Han + 0x3008, # .. 0x3020 ; Common + 0x3021, # .. 0x3029 ; Han + 0x302A, # .. 0x302D ; Inherited + 0x302E, # .. 0x302F ; Hangul + 0x3030, # .. 0x3037 ; Common + 0x3038, # .. 0x303B ; Han + 0x303C, # .. 0x303F ; Common + 0x3040, # .. 0x3040 ; Unknown + 0x3041, # .. 0x3096 ; Hiragana + 0x3097, # .. 0x3098 ; Unknown + 0x3099, # .. 0x309A ; Inherited + 0x309B, # .. 0x309C ; Common + 0x309D, # .. 0x309F ; Hiragana + 0x30A0, # .. 0x30A0 ; Common + 0x30A1, # .. 0x30FA ; Katakana + 0x30FB, # .. 0x30FC ; Common + 0x30FD, # .. 0x30FF ; Katakana + 0x3100, # .. 0x3104 ; Unknown + 0x3105, # .. 0x312F ; Bopomofo + 0x3130, # .. 0x3130 ; Unknown + 0x3131, # .. 0x318E ; Hangul + 0x318F, # .. 0x318F ; Unknown + 0x3190, # .. 0x319F ; Common + 0x31A0, # .. 0x31BF ; Bopomofo + 0x31C0, # .. 0x31E5 ; Common + 0x31E6, # .. 0x31EE ; Unknown + 0x31EF, # .. 0x31EF ; Common + 0x31F0, # .. 
0x31FF ; Katakana + 0x3200, # .. 0x321E ; Hangul + 0x321F, # .. 0x321F ; Unknown + 0x3220, # .. 0x325F ; Common + 0x3260, # .. 0x327E ; Hangul + 0x327F, # .. 0x32CF ; Common + 0x32D0, # .. 0x32FE ; Katakana + 0x32FF, # .. 0x32FF ; Common + 0x3300, # .. 0x3357 ; Katakana + 0x3358, # .. 0x33FF ; Common + 0x3400, # .. 0x4DBF ; Han + 0x4DC0, # .. 0x4DFF ; Common + 0x4E00, # .. 0x9FFF ; Han + 0xA000, # .. 0xA48C ; Yi + 0xA48D, # .. 0xA48F ; Unknown + 0xA490, # .. 0xA4C6 ; Yi + 0xA4C7, # .. 0xA4CF ; Unknown + 0xA4D0, # .. 0xA4FF ; Lisu + 0xA500, # .. 0xA62B ; Vai + 0xA62C, # .. 0xA63F ; Unknown + 0xA640, # .. 0xA69F ; Cyrillic + 0xA6A0, # .. 0xA6F7 ; Bamum + 0xA6F8, # .. 0xA6FF ; Unknown + 0xA700, # .. 0xA721 ; Common + 0xA722, # .. 0xA787 ; Latin + 0xA788, # .. 0xA78A ; Common + 0xA78B, # .. 0xA7CD ; Latin + 0xA7CE, # .. 0xA7CF ; Unknown + 0xA7D0, # .. 0xA7D1 ; Latin + 0xA7D2, # .. 0xA7D2 ; Unknown + 0xA7D3, # .. 0xA7D3 ; Latin + 0xA7D4, # .. 0xA7D4 ; Unknown + 0xA7D5, # .. 0xA7DC ; Latin + 0xA7DD, # .. 0xA7F1 ; Unknown + 0xA7F2, # .. 0xA7FF ; Latin + 0xA800, # .. 0xA82C ; Syloti_Nagri + 0xA82D, # .. 0xA82F ; Unknown + 0xA830, # .. 0xA839 ; Common + 0xA83A, # .. 0xA83F ; Unknown + 0xA840, # .. 0xA877 ; Phags_Pa + 0xA878, # .. 0xA87F ; Unknown + 0xA880, # .. 0xA8C5 ; Saurashtra + 0xA8C6, # .. 0xA8CD ; Unknown + 0xA8CE, # .. 0xA8D9 ; Saurashtra + 0xA8DA, # .. 0xA8DF ; Unknown + 0xA8E0, # .. 0xA8FF ; Devanagari + 0xA900, # .. 0xA92D ; Kayah_Li + 0xA92E, # .. 0xA92E ; Common + 0xA92F, # .. 0xA92F ; Kayah_Li + 0xA930, # .. 0xA953 ; Rejang + 0xA954, # .. 0xA95E ; Unknown + 0xA95F, # .. 0xA95F ; Rejang + 0xA960, # .. 0xA97C ; Hangul + 0xA97D, # .. 0xA97F ; Unknown + 0xA980, # .. 0xA9CD ; Javanese + 0xA9CE, # .. 0xA9CE ; Unknown + 0xA9CF, # .. 0xA9CF ; Common + 0xA9D0, # .. 0xA9D9 ; Javanese + 0xA9DA, # .. 0xA9DD ; Unknown + 0xA9DE, # .. 0xA9DF ; Javanese + 0xA9E0, # .. 0xA9FE ; Myanmar + 0xA9FF, # .. 0xA9FF ; Unknown + 0xAA00, # .. 0xAA36 ; Cham + 0xAA37, # .. 
0xAA3F ; Unknown + 0xAA40, # .. 0xAA4D ; Cham + 0xAA4E, # .. 0xAA4F ; Unknown + 0xAA50, # .. 0xAA59 ; Cham + 0xAA5A, # .. 0xAA5B ; Unknown + 0xAA5C, # .. 0xAA5F ; Cham + 0xAA60, # .. 0xAA7F ; Myanmar + 0xAA80, # .. 0xAAC2 ; Tai_Viet + 0xAAC3, # .. 0xAADA ; Unknown + 0xAADB, # .. 0xAADF ; Tai_Viet + 0xAAE0, # .. 0xAAF6 ; Meetei_Mayek + 0xAAF7, # .. 0xAB00 ; Unknown + 0xAB01, # .. 0xAB06 ; Ethiopic + 0xAB07, # .. 0xAB08 ; Unknown + 0xAB09, # .. 0xAB0E ; Ethiopic + 0xAB0F, # .. 0xAB10 ; Unknown + 0xAB11, # .. 0xAB16 ; Ethiopic + 0xAB17, # .. 0xAB1F ; Unknown + 0xAB20, # .. 0xAB26 ; Ethiopic + 0xAB27, # .. 0xAB27 ; Unknown + 0xAB28, # .. 0xAB2E ; Ethiopic + 0xAB2F, # .. 0xAB2F ; Unknown + 0xAB30, # .. 0xAB5A ; Latin + 0xAB5B, # .. 0xAB5B ; Common + 0xAB5C, # .. 0xAB64 ; Latin + 0xAB65, # .. 0xAB65 ; Greek + 0xAB66, # .. 0xAB69 ; Latin + 0xAB6A, # .. 0xAB6B ; Common + 0xAB6C, # .. 0xAB6F ; Unknown + 0xAB70, # .. 0xABBF ; Cherokee + 0xABC0, # .. 0xABED ; Meetei_Mayek + 0xABEE, # .. 0xABEF ; Unknown + 0xABF0, # .. 0xABF9 ; Meetei_Mayek + 0xABFA, # .. 0xABFF ; Unknown + 0xAC00, # .. 0xD7A3 ; Hangul + 0xD7A4, # .. 0xD7AF ; Unknown + 0xD7B0, # .. 0xD7C6 ; Hangul + 0xD7C7, # .. 0xD7CA ; Unknown + 0xD7CB, # .. 0xD7FB ; Hangul + 0xD7FC, # .. 0xF8FF ; Unknown + 0xF900, # .. 0xFA6D ; Han + 0xFA6E, # .. 0xFA6F ; Unknown + 0xFA70, # .. 0xFAD9 ; Han + 0xFADA, # .. 0xFAFF ; Unknown + 0xFB00, # .. 0xFB06 ; Latin + 0xFB07, # .. 0xFB12 ; Unknown + 0xFB13, # .. 0xFB17 ; Armenian + 0xFB18, # .. 0xFB1C ; Unknown + 0xFB1D, # .. 0xFB36 ; Hebrew + 0xFB37, # .. 0xFB37 ; Unknown + 0xFB38, # .. 0xFB3C ; Hebrew + 0xFB3D, # .. 0xFB3D ; Unknown + 0xFB3E, # .. 0xFB3E ; Hebrew + 0xFB3F, # .. 0xFB3F ; Unknown + 0xFB40, # .. 0xFB41 ; Hebrew + 0xFB42, # .. 0xFB42 ; Unknown + 0xFB43, # .. 0xFB44 ; Hebrew + 0xFB45, # .. 0xFB45 ; Unknown + 0xFB46, # .. 0xFB4F ; Hebrew + 0xFB50, # .. 0xFBC2 ; Arabic + 0xFBC3, # .. 0xFBD2 ; Unknown + 0xFBD3, # .. 0xFD3D ; Arabic + 0xFD3E, # .. 0xFD3F ; Common + 0xFD40, # .. 
0xFD8F ; Arabic + 0xFD90, # .. 0xFD91 ; Unknown + 0xFD92, # .. 0xFDC7 ; Arabic + 0xFDC8, # .. 0xFDCE ; Unknown + 0xFDCF, # .. 0xFDCF ; Arabic + 0xFDD0, # .. 0xFDEF ; Unknown + 0xFDF0, # .. 0xFDFF ; Arabic + 0xFE00, # .. 0xFE0F ; Inherited + 0xFE10, # .. 0xFE19 ; Common + 0xFE1A, # .. 0xFE1F ; Unknown + 0xFE20, # .. 0xFE2D ; Inherited + 0xFE2E, # .. 0xFE2F ; Cyrillic + 0xFE30, # .. 0xFE52 ; Common + 0xFE53, # .. 0xFE53 ; Unknown + 0xFE54, # .. 0xFE66 ; Common + 0xFE67, # .. 0xFE67 ; Unknown + 0xFE68, # .. 0xFE6B ; Common + 0xFE6C, # .. 0xFE6F ; Unknown + 0xFE70, # .. 0xFE74 ; Arabic + 0xFE75, # .. 0xFE75 ; Unknown + 0xFE76, # .. 0xFEFC ; Arabic + 0xFEFD, # .. 0xFEFE ; Unknown + 0xFEFF, # .. 0xFEFF ; Common + 0xFF00, # .. 0xFF00 ; Unknown + 0xFF01, # .. 0xFF20 ; Common + 0xFF21, # .. 0xFF3A ; Latin + 0xFF3B, # .. 0xFF40 ; Common + 0xFF41, # .. 0xFF5A ; Latin + 0xFF5B, # .. 0xFF65 ; Common + 0xFF66, # .. 0xFF6F ; Katakana + 0xFF70, # .. 0xFF70 ; Common + 0xFF71, # .. 0xFF9D ; Katakana + 0xFF9E, # .. 0xFF9F ; Common + 0xFFA0, # .. 0xFFBE ; Hangul + 0xFFBF, # .. 0xFFC1 ; Unknown + 0xFFC2, # .. 0xFFC7 ; Hangul + 0xFFC8, # .. 0xFFC9 ; Unknown + 0xFFCA, # .. 0xFFCF ; Hangul + 0xFFD0, # .. 0xFFD1 ; Unknown + 0xFFD2, # .. 0xFFD7 ; Hangul + 0xFFD8, # .. 0xFFD9 ; Unknown + 0xFFDA, # .. 0xFFDC ; Hangul + 0xFFDD, # .. 0xFFDF ; Unknown + 0xFFE0, # .. 0xFFE6 ; Common + 0xFFE7, # .. 0xFFE7 ; Unknown + 0xFFE8, # .. 0xFFEE ; Common + 0xFFEF, # .. 0xFFF8 ; Unknown + 0xFFF9, # .. 0xFFFD ; Common + 0xFFFE, # .. 0xFFFF ; Unknown + 0x10000, # .. 0x1000B ; Linear_B + 0x1000C, # .. 0x1000C ; Unknown + 0x1000D, # .. 0x10026 ; Linear_B + 0x10027, # .. 0x10027 ; Unknown + 0x10028, # .. 0x1003A ; Linear_B + 0x1003B, # .. 0x1003B ; Unknown + 0x1003C, # .. 0x1003D ; Linear_B + 0x1003E, # .. 0x1003E ; Unknown + 0x1003F, # .. 0x1004D ; Linear_B + 0x1004E, # .. 0x1004F ; Unknown + 0x10050, # .. 0x1005D ; Linear_B + 0x1005E, # .. 0x1007F ; Unknown + 0x10080, # .. 0x100FA ; Linear_B + 0x100FB, # .. 
0x100FF ; Unknown + 0x10100, # .. 0x10102 ; Common + 0x10103, # .. 0x10106 ; Unknown + 0x10107, # .. 0x10133 ; Common + 0x10134, # .. 0x10136 ; Unknown + 0x10137, # .. 0x1013F ; Common + 0x10140, # .. 0x1018E ; Greek + 0x1018F, # .. 0x1018F ; Unknown + 0x10190, # .. 0x1019C ; Common + 0x1019D, # .. 0x1019F ; Unknown + 0x101A0, # .. 0x101A0 ; Greek + 0x101A1, # .. 0x101CF ; Unknown + 0x101D0, # .. 0x101FC ; Common + 0x101FD, # .. 0x101FD ; Inherited + 0x101FE, # .. 0x1027F ; Unknown + 0x10280, # .. 0x1029C ; Lycian + 0x1029D, # .. 0x1029F ; Unknown + 0x102A0, # .. 0x102D0 ; Carian + 0x102D1, # .. 0x102DF ; Unknown + 0x102E0, # .. 0x102E0 ; Inherited + 0x102E1, # .. 0x102FB ; Common + 0x102FC, # .. 0x102FF ; Unknown + 0x10300, # .. 0x10323 ; Old_Italic + 0x10324, # .. 0x1032C ; Unknown + 0x1032D, # .. 0x1032F ; Old_Italic + 0x10330, # .. 0x1034A ; Gothic + 0x1034B, # .. 0x1034F ; Unknown + 0x10350, # .. 0x1037A ; Old_Permic + 0x1037B, # .. 0x1037F ; Unknown + 0x10380, # .. 0x1039D ; Ugaritic + 0x1039E, # .. 0x1039E ; Unknown + 0x1039F, # .. 0x1039F ; Ugaritic + 0x103A0, # .. 0x103C3 ; Old_Persian + 0x103C4, # .. 0x103C7 ; Unknown + 0x103C8, # .. 0x103D5 ; Old_Persian + 0x103D6, # .. 0x103FF ; Unknown + 0x10400, # .. 0x1044F ; Deseret + 0x10450, # .. 0x1047F ; Shavian + 0x10480, # .. 0x1049D ; Osmanya + 0x1049E, # .. 0x1049F ; Unknown + 0x104A0, # .. 0x104A9 ; Osmanya + 0x104AA, # .. 0x104AF ; Unknown + 0x104B0, # .. 0x104D3 ; Osage + 0x104D4, # .. 0x104D7 ; Unknown + 0x104D8, # .. 0x104FB ; Osage + 0x104FC, # .. 0x104FF ; Unknown + 0x10500, # .. 0x10527 ; Elbasan + 0x10528, # .. 0x1052F ; Unknown + 0x10530, # .. 0x10563 ; Caucasian_Albanian + 0x10564, # .. 0x1056E ; Unknown + 0x1056F, # .. 0x1056F ; Caucasian_Albanian + 0x10570, # .. 0x1057A ; Vithkuqi + 0x1057B, # .. 0x1057B ; Unknown + 0x1057C, # .. 0x1058A ; Vithkuqi + 0x1058B, # .. 0x1058B ; Unknown + 0x1058C, # .. 0x10592 ; Vithkuqi + 0x10593, # .. 0x10593 ; Unknown + 0x10594, # .. 
0x10595 ; Vithkuqi + 0x10596, # .. 0x10596 ; Unknown + 0x10597, # .. 0x105A1 ; Vithkuqi + 0x105A2, # .. 0x105A2 ; Unknown + 0x105A3, # .. 0x105B1 ; Vithkuqi + 0x105B2, # .. 0x105B2 ; Unknown + 0x105B3, # .. 0x105B9 ; Vithkuqi + 0x105BA, # .. 0x105BA ; Unknown + 0x105BB, # .. 0x105BC ; Vithkuqi + 0x105BD, # .. 0x105BF ; Unknown + 0x105C0, # .. 0x105F3 ; Todhri + 0x105F4, # .. 0x105FF ; Unknown + 0x10600, # .. 0x10736 ; Linear_A + 0x10737, # .. 0x1073F ; Unknown + 0x10740, # .. 0x10755 ; Linear_A + 0x10756, # .. 0x1075F ; Unknown + 0x10760, # .. 0x10767 ; Linear_A + 0x10768, # .. 0x1077F ; Unknown + 0x10780, # .. 0x10785 ; Latin + 0x10786, # .. 0x10786 ; Unknown + 0x10787, # .. 0x107B0 ; Latin + 0x107B1, # .. 0x107B1 ; Unknown + 0x107B2, # .. 0x107BA ; Latin + 0x107BB, # .. 0x107FF ; Unknown + 0x10800, # .. 0x10805 ; Cypriot + 0x10806, # .. 0x10807 ; Unknown + 0x10808, # .. 0x10808 ; Cypriot + 0x10809, # .. 0x10809 ; Unknown + 0x1080A, # .. 0x10835 ; Cypriot + 0x10836, # .. 0x10836 ; Unknown + 0x10837, # .. 0x10838 ; Cypriot + 0x10839, # .. 0x1083B ; Unknown + 0x1083C, # .. 0x1083C ; Cypriot + 0x1083D, # .. 0x1083E ; Unknown + 0x1083F, # .. 0x1083F ; Cypriot + 0x10840, # .. 0x10855 ; Imperial_Aramaic + 0x10856, # .. 0x10856 ; Unknown + 0x10857, # .. 0x1085F ; Imperial_Aramaic + 0x10860, # .. 0x1087F ; Palmyrene + 0x10880, # .. 0x1089E ; Nabataean + 0x1089F, # .. 0x108A6 ; Unknown + 0x108A7, # .. 0x108AF ; Nabataean + 0x108B0, # .. 0x108DF ; Unknown + 0x108E0, # .. 0x108F2 ; Hatran + 0x108F3, # .. 0x108F3 ; Unknown + 0x108F4, # .. 0x108F5 ; Hatran + 0x108F6, # .. 0x108FA ; Unknown + 0x108FB, # .. 0x108FF ; Hatran + 0x10900, # .. 0x1091B ; Phoenician + 0x1091C, # .. 0x1091E ; Unknown + 0x1091F, # .. 0x1091F ; Phoenician + 0x10920, # .. 0x10939 ; Lydian + 0x1093A, # .. 0x1093E ; Unknown + 0x1093F, # .. 0x1093F ; Lydian + 0x10940, # .. 0x1097F ; Unknown + 0x10980, # .. 0x1099F ; Meroitic_Hieroglyphs + 0x109A0, # .. 0x109B7 ; Meroitic_Cursive + 0x109B8, # .. 
0x109BB ; Unknown + 0x109BC, # .. 0x109CF ; Meroitic_Cursive + 0x109D0, # .. 0x109D1 ; Unknown + 0x109D2, # .. 0x109FF ; Meroitic_Cursive + 0x10A00, # .. 0x10A03 ; Kharoshthi + 0x10A04, # .. 0x10A04 ; Unknown + 0x10A05, # .. 0x10A06 ; Kharoshthi + 0x10A07, # .. 0x10A0B ; Unknown + 0x10A0C, # .. 0x10A13 ; Kharoshthi + 0x10A14, # .. 0x10A14 ; Unknown + 0x10A15, # .. 0x10A17 ; Kharoshthi + 0x10A18, # .. 0x10A18 ; Unknown + 0x10A19, # .. 0x10A35 ; Kharoshthi + 0x10A36, # .. 0x10A37 ; Unknown + 0x10A38, # .. 0x10A3A ; Kharoshthi + 0x10A3B, # .. 0x10A3E ; Unknown + 0x10A3F, # .. 0x10A48 ; Kharoshthi + 0x10A49, # .. 0x10A4F ; Unknown + 0x10A50, # .. 0x10A58 ; Kharoshthi + 0x10A59, # .. 0x10A5F ; Unknown + 0x10A60, # .. 0x10A7F ; Old_South_Arabian + 0x10A80, # .. 0x10A9F ; Old_North_Arabian + 0x10AA0, # .. 0x10ABF ; Unknown + 0x10AC0, # .. 0x10AE6 ; Manichaean + 0x10AE7, # .. 0x10AEA ; Unknown + 0x10AEB, # .. 0x10AF6 ; Manichaean + 0x10AF7, # .. 0x10AFF ; Unknown + 0x10B00, # .. 0x10B35 ; Avestan + 0x10B36, # .. 0x10B38 ; Unknown + 0x10B39, # .. 0x10B3F ; Avestan + 0x10B40, # .. 0x10B55 ; Inscriptional_Parthian + 0x10B56, # .. 0x10B57 ; Unknown + 0x10B58, # .. 0x10B5F ; Inscriptional_Parthian + 0x10B60, # .. 0x10B72 ; Inscriptional_Pahlavi + 0x10B73, # .. 0x10B77 ; Unknown + 0x10B78, # .. 0x10B7F ; Inscriptional_Pahlavi + 0x10B80, # .. 0x10B91 ; Psalter_Pahlavi + 0x10B92, # .. 0x10B98 ; Unknown + 0x10B99, # .. 0x10B9C ; Psalter_Pahlavi + 0x10B9D, # .. 0x10BA8 ; Unknown + 0x10BA9, # .. 0x10BAF ; Psalter_Pahlavi + 0x10BB0, # .. 0x10BFF ; Unknown + 0x10C00, # .. 0x10C48 ; Old_Turkic + 0x10C49, # .. 0x10C7F ; Unknown + 0x10C80, # .. 0x10CB2 ; Old_Hungarian + 0x10CB3, # .. 0x10CBF ; Unknown + 0x10CC0, # .. 0x10CF2 ; Old_Hungarian + 0x10CF3, # .. 0x10CF9 ; Unknown + 0x10CFA, # .. 0x10CFF ; Old_Hungarian + 0x10D00, # .. 0x10D27 ; Hanifi_Rohingya + 0x10D28, # .. 0x10D2F ; Unknown + 0x10D30, # .. 0x10D39 ; Hanifi_Rohingya + 0x10D3A, # .. 0x10D3F ; Unknown + 0x10D40, # .. 
0x10D65 ; Garay + 0x10D66, # .. 0x10D68 ; Unknown + 0x10D69, # .. 0x10D85 ; Garay + 0x10D86, # .. 0x10D8D ; Unknown + 0x10D8E, # .. 0x10D8F ; Garay + 0x10D90, # .. 0x10E5F ; Unknown + 0x10E60, # .. 0x10E7E ; Arabic + 0x10E7F, # .. 0x10E7F ; Unknown + 0x10E80, # .. 0x10EA9 ; Yezidi + 0x10EAA, # .. 0x10EAA ; Unknown + 0x10EAB, # .. 0x10EAD ; Yezidi + 0x10EAE, # .. 0x10EAF ; Unknown + 0x10EB0, # .. 0x10EB1 ; Yezidi + 0x10EB2, # .. 0x10EC1 ; Unknown + 0x10EC2, # .. 0x10EC4 ; Arabic + 0x10EC5, # .. 0x10EFB ; Unknown + 0x10EFC, # .. 0x10EFF ; Arabic + 0x10F00, # .. 0x10F27 ; Old_Sogdian + 0x10F28, # .. 0x10F2F ; Unknown + 0x10F30, # .. 0x10F59 ; Sogdian + 0x10F5A, # .. 0x10F6F ; Unknown + 0x10F70, # .. 0x10F89 ; Old_Uyghur + 0x10F8A, # .. 0x10FAF ; Unknown + 0x10FB0, # .. 0x10FCB ; Chorasmian + 0x10FCC, # .. 0x10FDF ; Unknown + 0x10FE0, # .. 0x10FF6 ; Elymaic + 0x10FF7, # .. 0x10FFF ; Unknown + 0x11000, # .. 0x1104D ; Brahmi + 0x1104E, # .. 0x11051 ; Unknown + 0x11052, # .. 0x11075 ; Brahmi + 0x11076, # .. 0x1107E ; Unknown + 0x1107F, # .. 0x1107F ; Brahmi + 0x11080, # .. 0x110C2 ; Kaithi + 0x110C3, # .. 0x110CC ; Unknown + 0x110CD, # .. 0x110CD ; Kaithi + 0x110CE, # .. 0x110CF ; Unknown + 0x110D0, # .. 0x110E8 ; Sora_Sompeng + 0x110E9, # .. 0x110EF ; Unknown + 0x110F0, # .. 0x110F9 ; Sora_Sompeng + 0x110FA, # .. 0x110FF ; Unknown + 0x11100, # .. 0x11134 ; Chakma + 0x11135, # .. 0x11135 ; Unknown + 0x11136, # .. 0x11147 ; Chakma + 0x11148, # .. 0x1114F ; Unknown + 0x11150, # .. 0x11176 ; Mahajani + 0x11177, # .. 0x1117F ; Unknown + 0x11180, # .. 0x111DF ; Sharada + 0x111E0, # .. 0x111E0 ; Unknown + 0x111E1, # .. 0x111F4 ; Sinhala + 0x111F5, # .. 0x111FF ; Unknown + 0x11200, # .. 0x11211 ; Khojki + 0x11212, # .. 0x11212 ; Unknown + 0x11213, # .. 0x11241 ; Khojki + 0x11242, # .. 0x1127F ; Unknown + 0x11280, # .. 0x11286 ; Multani + 0x11287, # .. 0x11287 ; Unknown + 0x11288, # .. 0x11288 ; Multani + 0x11289, # .. 0x11289 ; Unknown + 0x1128A, # .. 
0x1128D ; Multani + 0x1128E, # .. 0x1128E ; Unknown + 0x1128F, # .. 0x1129D ; Multani + 0x1129E, # .. 0x1129E ; Unknown + 0x1129F, # .. 0x112A9 ; Multani + 0x112AA, # .. 0x112AF ; Unknown + 0x112B0, # .. 0x112EA ; Khudawadi + 0x112EB, # .. 0x112EF ; Unknown + 0x112F0, # .. 0x112F9 ; Khudawadi + 0x112FA, # .. 0x112FF ; Unknown + 0x11300, # .. 0x11303 ; Grantha + 0x11304, # .. 0x11304 ; Unknown + 0x11305, # .. 0x1130C ; Grantha + 0x1130D, # .. 0x1130E ; Unknown + 0x1130F, # .. 0x11310 ; Grantha + 0x11311, # .. 0x11312 ; Unknown + 0x11313, # .. 0x11328 ; Grantha + 0x11329, # .. 0x11329 ; Unknown + 0x1132A, # .. 0x11330 ; Grantha + 0x11331, # .. 0x11331 ; Unknown + 0x11332, # .. 0x11333 ; Grantha + 0x11334, # .. 0x11334 ; Unknown + 0x11335, # .. 0x11339 ; Grantha + 0x1133A, # .. 0x1133A ; Unknown + 0x1133B, # .. 0x1133B ; Inherited + 0x1133C, # .. 0x11344 ; Grantha + 0x11345, # .. 0x11346 ; Unknown + 0x11347, # .. 0x11348 ; Grantha + 0x11349, # .. 0x1134A ; Unknown + 0x1134B, # .. 0x1134D ; Grantha + 0x1134E, # .. 0x1134F ; Unknown + 0x11350, # .. 0x11350 ; Grantha + 0x11351, # .. 0x11356 ; Unknown + 0x11357, # .. 0x11357 ; Grantha + 0x11358, # .. 0x1135C ; Unknown + 0x1135D, # .. 0x11363 ; Grantha + 0x11364, # .. 0x11365 ; Unknown + 0x11366, # .. 0x1136C ; Grantha + 0x1136D, # .. 0x1136F ; Unknown + 0x11370, # .. 0x11374 ; Grantha + 0x11375, # .. 0x1137F ; Unknown + 0x11380, # .. 0x11389 ; Tulu_Tigalari + 0x1138A, # .. 0x1138A ; Unknown + 0x1138B, # .. 0x1138B ; Tulu_Tigalari + 0x1138C, # .. 0x1138D ; Unknown + 0x1138E, # .. 0x1138E ; Tulu_Tigalari + 0x1138F, # .. 0x1138F ; Unknown + 0x11390, # .. 0x113B5 ; Tulu_Tigalari + 0x113B6, # .. 0x113B6 ; Unknown + 0x113B7, # .. 0x113C0 ; Tulu_Tigalari + 0x113C1, # .. 0x113C1 ; Unknown + 0x113C2, # .. 0x113C2 ; Tulu_Tigalari + 0x113C3, # .. 0x113C4 ; Unknown + 0x113C5, # .. 0x113C5 ; Tulu_Tigalari + 0x113C6, # .. 0x113C6 ; Unknown + 0x113C7, # .. 0x113CA ; Tulu_Tigalari + 0x113CB, # .. 0x113CB ; Unknown + 0x113CC, # .. 
0x113D5 ; Tulu_Tigalari + 0x113D6, # .. 0x113D6 ; Unknown + 0x113D7, # .. 0x113D8 ; Tulu_Tigalari + 0x113D9, # .. 0x113E0 ; Unknown + 0x113E1, # .. 0x113E2 ; Tulu_Tigalari + 0x113E3, # .. 0x113FF ; Unknown + 0x11400, # .. 0x1145B ; Newa + 0x1145C, # .. 0x1145C ; Unknown + 0x1145D, # .. 0x11461 ; Newa + 0x11462, # .. 0x1147F ; Unknown + 0x11480, # .. 0x114C7 ; Tirhuta + 0x114C8, # .. 0x114CF ; Unknown + 0x114D0, # .. 0x114D9 ; Tirhuta + 0x114DA, # .. 0x1157F ; Unknown + 0x11580, # .. 0x115B5 ; Siddham + 0x115B6, # .. 0x115B7 ; Unknown + 0x115B8, # .. 0x115DD ; Siddham + 0x115DE, # .. 0x115FF ; Unknown + 0x11600, # .. 0x11644 ; Modi + 0x11645, # .. 0x1164F ; Unknown + 0x11650, # .. 0x11659 ; Modi + 0x1165A, # .. 0x1165F ; Unknown + 0x11660, # .. 0x1166C ; Mongolian + 0x1166D, # .. 0x1167F ; Unknown + 0x11680, # .. 0x116B9 ; Takri + 0x116BA, # .. 0x116BF ; Unknown + 0x116C0, # .. 0x116C9 ; Takri + 0x116CA, # .. 0x116CF ; Unknown + 0x116D0, # .. 0x116E3 ; Myanmar + 0x116E4, # .. 0x116FF ; Unknown + 0x11700, # .. 0x1171A ; Ahom + 0x1171B, # .. 0x1171C ; Unknown + 0x1171D, # .. 0x1172B ; Ahom + 0x1172C, # .. 0x1172F ; Unknown + 0x11730, # .. 0x11746 ; Ahom + 0x11747, # .. 0x117FF ; Unknown + 0x11800, # .. 0x1183B ; Dogra + 0x1183C, # .. 0x1189F ; Unknown + 0x118A0, # .. 0x118F2 ; Warang_Citi + 0x118F3, # .. 0x118FE ; Unknown + 0x118FF, # .. 0x118FF ; Warang_Citi + 0x11900, # .. 0x11906 ; Dives_Akuru + 0x11907, # .. 0x11908 ; Unknown + 0x11909, # .. 0x11909 ; Dives_Akuru + 0x1190A, # .. 0x1190B ; Unknown + 0x1190C, # .. 0x11913 ; Dives_Akuru + 0x11914, # .. 0x11914 ; Unknown + 0x11915, # .. 0x11916 ; Dives_Akuru + 0x11917, # .. 0x11917 ; Unknown + 0x11918, # .. 0x11935 ; Dives_Akuru + 0x11936, # .. 0x11936 ; Unknown + 0x11937, # .. 0x11938 ; Dives_Akuru + 0x11939, # .. 0x1193A ; Unknown + 0x1193B, # .. 0x11946 ; Dives_Akuru + 0x11947, # .. 0x1194F ; Unknown + 0x11950, # .. 0x11959 ; Dives_Akuru + 0x1195A, # .. 0x1199F ; Unknown + 0x119A0, # .. 
0x119A7 ; Nandinagari + 0x119A8, # .. 0x119A9 ; Unknown + 0x119AA, # .. 0x119D7 ; Nandinagari + 0x119D8, # .. 0x119D9 ; Unknown + 0x119DA, # .. 0x119E4 ; Nandinagari + 0x119E5, # .. 0x119FF ; Unknown + 0x11A00, # .. 0x11A47 ; Zanabazar_Square + 0x11A48, # .. 0x11A4F ; Unknown + 0x11A50, # .. 0x11AA2 ; Soyombo + 0x11AA3, # .. 0x11AAF ; Unknown + 0x11AB0, # .. 0x11ABF ; Canadian_Aboriginal + 0x11AC0, # .. 0x11AF8 ; Pau_Cin_Hau + 0x11AF9, # .. 0x11AFF ; Unknown + 0x11B00, # .. 0x11B09 ; Devanagari + 0x11B0A, # .. 0x11BBF ; Unknown + 0x11BC0, # .. 0x11BE1 ; Sunuwar + 0x11BE2, # .. 0x11BEF ; Unknown + 0x11BF0, # .. 0x11BF9 ; Sunuwar + 0x11BFA, # .. 0x11BFF ; Unknown + 0x11C00, # .. 0x11C08 ; Bhaiksuki + 0x11C09, # .. 0x11C09 ; Unknown + 0x11C0A, # .. 0x11C36 ; Bhaiksuki + 0x11C37, # .. 0x11C37 ; Unknown + 0x11C38, # .. 0x11C45 ; Bhaiksuki + 0x11C46, # .. 0x11C4F ; Unknown + 0x11C50, # .. 0x11C6C ; Bhaiksuki + 0x11C6D, # .. 0x11C6F ; Unknown + 0x11C70, # .. 0x11C8F ; Marchen + 0x11C90, # .. 0x11C91 ; Unknown + 0x11C92, # .. 0x11CA7 ; Marchen + 0x11CA8, # .. 0x11CA8 ; Unknown + 0x11CA9, # .. 0x11CB6 ; Marchen + 0x11CB7, # .. 0x11CFF ; Unknown + 0x11D00, # .. 0x11D06 ; Masaram_Gondi + 0x11D07, # .. 0x11D07 ; Unknown + 0x11D08, # .. 0x11D09 ; Masaram_Gondi + 0x11D0A, # .. 0x11D0A ; Unknown + 0x11D0B, # .. 0x11D36 ; Masaram_Gondi + 0x11D37, # .. 0x11D39 ; Unknown + 0x11D3A, # .. 0x11D3A ; Masaram_Gondi + 0x11D3B, # .. 0x11D3B ; Unknown + 0x11D3C, # .. 0x11D3D ; Masaram_Gondi + 0x11D3E, # .. 0x11D3E ; Unknown + 0x11D3F, # .. 0x11D47 ; Masaram_Gondi + 0x11D48, # .. 0x11D4F ; Unknown + 0x11D50, # .. 0x11D59 ; Masaram_Gondi + 0x11D5A, # .. 0x11D5F ; Unknown + 0x11D60, # .. 0x11D65 ; Gunjala_Gondi + 0x11D66, # .. 0x11D66 ; Unknown + 0x11D67, # .. 0x11D68 ; Gunjala_Gondi + 0x11D69, # .. 0x11D69 ; Unknown + 0x11D6A, # .. 0x11D8E ; Gunjala_Gondi + 0x11D8F, # .. 0x11D8F ; Unknown + 0x11D90, # .. 0x11D91 ; Gunjala_Gondi + 0x11D92, # .. 0x11D92 ; Unknown + 0x11D93, # .. 
0x11D98 ; Gunjala_Gondi + 0x11D99, # .. 0x11D9F ; Unknown + 0x11DA0, # .. 0x11DA9 ; Gunjala_Gondi + 0x11DAA, # .. 0x11EDF ; Unknown + 0x11EE0, # .. 0x11EF8 ; Makasar + 0x11EF9, # .. 0x11EFF ; Unknown + 0x11F00, # .. 0x11F10 ; Kawi + 0x11F11, # .. 0x11F11 ; Unknown + 0x11F12, # .. 0x11F3A ; Kawi + 0x11F3B, # .. 0x11F3D ; Unknown + 0x11F3E, # .. 0x11F5A ; Kawi + 0x11F5B, # .. 0x11FAF ; Unknown + 0x11FB0, # .. 0x11FB0 ; Lisu + 0x11FB1, # .. 0x11FBF ; Unknown + 0x11FC0, # .. 0x11FF1 ; Tamil + 0x11FF2, # .. 0x11FFE ; Unknown + 0x11FFF, # .. 0x11FFF ; Tamil + 0x12000, # .. 0x12399 ; Cuneiform + 0x1239A, # .. 0x123FF ; Unknown + 0x12400, # .. 0x1246E ; Cuneiform + 0x1246F, # .. 0x1246F ; Unknown + 0x12470, # .. 0x12474 ; Cuneiform + 0x12475, # .. 0x1247F ; Unknown + 0x12480, # .. 0x12543 ; Cuneiform + 0x12544, # .. 0x12F8F ; Unknown + 0x12F90, # .. 0x12FF2 ; Cypro_Minoan + 0x12FF3, # .. 0x12FFF ; Unknown + 0x13000, # .. 0x13455 ; Egyptian_Hieroglyphs + 0x13456, # .. 0x1345F ; Unknown + 0x13460, # .. 0x143FA ; Egyptian_Hieroglyphs + 0x143FB, # .. 0x143FF ; Unknown + 0x14400, # .. 0x14646 ; Anatolian_Hieroglyphs + 0x14647, # .. 0x160FF ; Unknown + 0x16100, # .. 0x16139 ; Gurung_Khema + 0x1613A, # .. 0x167FF ; Unknown + 0x16800, # .. 0x16A38 ; Bamum + 0x16A39, # .. 0x16A3F ; Unknown + 0x16A40, # .. 0x16A5E ; Mro + 0x16A5F, # .. 0x16A5F ; Unknown + 0x16A60, # .. 0x16A69 ; Mro + 0x16A6A, # .. 0x16A6D ; Unknown + 0x16A6E, # .. 0x16A6F ; Mro + 0x16A70, # .. 0x16ABE ; Tangsa + 0x16ABF, # .. 0x16ABF ; Unknown + 0x16AC0, # .. 0x16AC9 ; Tangsa + 0x16ACA, # .. 0x16ACF ; Unknown + 0x16AD0, # .. 0x16AED ; Bassa_Vah + 0x16AEE, # .. 0x16AEF ; Unknown + 0x16AF0, # .. 0x16AF5 ; Bassa_Vah + 0x16AF6, # .. 0x16AFF ; Unknown + 0x16B00, # .. 0x16B45 ; Pahawh_Hmong + 0x16B46, # .. 0x16B4F ; Unknown + 0x16B50, # .. 0x16B59 ; Pahawh_Hmong + 0x16B5A, # .. 0x16B5A ; Unknown + 0x16B5B, # .. 0x16B61 ; Pahawh_Hmong + 0x16B62, # .. 0x16B62 ; Unknown + 0x16B63, # .. 0x16B77 ; Pahawh_Hmong + 0x16B78, # .. 
0x16B7C ; Unknown + 0x16B7D, # .. 0x16B8F ; Pahawh_Hmong + 0x16B90, # .. 0x16D3F ; Unknown + 0x16D40, # .. 0x16D79 ; Kirat_Rai + 0x16D7A, # .. 0x16E3F ; Unknown + 0x16E40, # .. 0x16E9A ; Medefaidrin + 0x16E9B, # .. 0x16EFF ; Unknown + 0x16F00, # .. 0x16F4A ; Miao + 0x16F4B, # .. 0x16F4E ; Unknown + 0x16F4F, # .. 0x16F87 ; Miao + 0x16F88, # .. 0x16F8E ; Unknown + 0x16F8F, # .. 0x16F9F ; Miao + 0x16FA0, # .. 0x16FDF ; Unknown + 0x16FE0, # .. 0x16FE0 ; Tangut + 0x16FE1, # .. 0x16FE1 ; Nushu + 0x16FE2, # .. 0x16FE3 ; Han + 0x16FE4, # .. 0x16FE4 ; Khitan_Small_Script + 0x16FE5, # .. 0x16FEF ; Unknown + 0x16FF0, # .. 0x16FF1 ; Han + 0x16FF2, # .. 0x16FFF ; Unknown + 0x17000, # .. 0x187F7 ; Tangut + 0x187F8, # .. 0x187FF ; Unknown + 0x18800, # .. 0x18AFF ; Tangut + 0x18B00, # .. 0x18CD5 ; Khitan_Small_Script + 0x18CD6, # .. 0x18CFE ; Unknown + 0x18CFF, # .. 0x18CFF ; Khitan_Small_Script + 0x18D00, # .. 0x18D08 ; Tangut + 0x18D09, # .. 0x1AFEF ; Unknown + 0x1AFF0, # .. 0x1AFF3 ; Katakana + 0x1AFF4, # .. 0x1AFF4 ; Unknown + 0x1AFF5, # .. 0x1AFFB ; Katakana + 0x1AFFC, # .. 0x1AFFC ; Unknown + 0x1AFFD, # .. 0x1AFFE ; Katakana + 0x1AFFF, # .. 0x1AFFF ; Unknown + 0x1B000, # .. 0x1B000 ; Katakana + 0x1B001, # .. 0x1B11F ; Hiragana + 0x1B120, # .. 0x1B122 ; Katakana + 0x1B123, # .. 0x1B131 ; Unknown + 0x1B132, # .. 0x1B132 ; Hiragana + 0x1B133, # .. 0x1B14F ; Unknown + 0x1B150, # .. 0x1B152 ; Hiragana + 0x1B153, # .. 0x1B154 ; Unknown + 0x1B155, # .. 0x1B155 ; Katakana + 0x1B156, # .. 0x1B163 ; Unknown + 0x1B164, # .. 0x1B167 ; Katakana + 0x1B168, # .. 0x1B16F ; Unknown + 0x1B170, # .. 0x1B2FB ; Nushu + 0x1B2FC, # .. 0x1BBFF ; Unknown + 0x1BC00, # .. 0x1BC6A ; Duployan + 0x1BC6B, # .. 0x1BC6F ; Unknown + 0x1BC70, # .. 0x1BC7C ; Duployan + 0x1BC7D, # .. 0x1BC7F ; Unknown + 0x1BC80, # .. 0x1BC88 ; Duployan + 0x1BC89, # .. 0x1BC8F ; Unknown + 0x1BC90, # .. 0x1BC99 ; Duployan + 0x1BC9A, # .. 0x1BC9B ; Unknown + 0x1BC9C, # .. 0x1BC9F ; Duployan + 0x1BCA0, # .. 
0x1BCA3 ; Common + 0x1BCA4, # .. 0x1CBFF ; Unknown + 0x1CC00, # .. 0x1CCF9 ; Common + 0x1CCFA, # .. 0x1CCFF ; Unknown + 0x1CD00, # .. 0x1CEB3 ; Common + 0x1CEB4, # .. 0x1CEFF ; Unknown + 0x1CF00, # .. 0x1CF2D ; Inherited + 0x1CF2E, # .. 0x1CF2F ; Unknown + 0x1CF30, # .. 0x1CF46 ; Inherited + 0x1CF47, # .. 0x1CF4F ; Unknown + 0x1CF50, # .. 0x1CFC3 ; Common + 0x1CFC4, # .. 0x1CFFF ; Unknown + 0x1D000, # .. 0x1D0F5 ; Common + 0x1D0F6, # .. 0x1D0FF ; Unknown + 0x1D100, # .. 0x1D126 ; Common + 0x1D127, # .. 0x1D128 ; Unknown + 0x1D129, # .. 0x1D166 ; Common + 0x1D167, # .. 0x1D169 ; Inherited + 0x1D16A, # .. 0x1D17A ; Common + 0x1D17B, # .. 0x1D182 ; Inherited + 0x1D183, # .. 0x1D184 ; Common + 0x1D185, # .. 0x1D18B ; Inherited + 0x1D18C, # .. 0x1D1A9 ; Common + 0x1D1AA, # .. 0x1D1AD ; Inherited + 0x1D1AE, # .. 0x1D1EA ; Common + 0x1D1EB, # .. 0x1D1FF ; Unknown + 0x1D200, # .. 0x1D245 ; Greek + 0x1D246, # .. 0x1D2BF ; Unknown + 0x1D2C0, # .. 0x1D2D3 ; Common + 0x1D2D4, # .. 0x1D2DF ; Unknown + 0x1D2E0, # .. 0x1D2F3 ; Common + 0x1D2F4, # .. 0x1D2FF ; Unknown + 0x1D300, # .. 0x1D356 ; Common + 0x1D357, # .. 0x1D35F ; Unknown + 0x1D360, # .. 0x1D378 ; Common + 0x1D379, # .. 0x1D3FF ; Unknown + 0x1D400, # .. 0x1D454 ; Common + 0x1D455, # .. 0x1D455 ; Unknown + 0x1D456, # .. 0x1D49C ; Common + 0x1D49D, # .. 0x1D49D ; Unknown + 0x1D49E, # .. 0x1D49F ; Common + 0x1D4A0, # .. 0x1D4A1 ; Unknown + 0x1D4A2, # .. 0x1D4A2 ; Common + 0x1D4A3, # .. 0x1D4A4 ; Unknown + 0x1D4A5, # .. 0x1D4A6 ; Common + 0x1D4A7, # .. 0x1D4A8 ; Unknown + 0x1D4A9, # .. 0x1D4AC ; Common + 0x1D4AD, # .. 0x1D4AD ; Unknown + 0x1D4AE, # .. 0x1D4B9 ; Common + 0x1D4BA, # .. 0x1D4BA ; Unknown + 0x1D4BB, # .. 0x1D4BB ; Common + 0x1D4BC, # .. 0x1D4BC ; Unknown + 0x1D4BD, # .. 0x1D4C3 ; Common + 0x1D4C4, # .. 0x1D4C4 ; Unknown + 0x1D4C5, # .. 0x1D505 ; Common + 0x1D506, # .. 0x1D506 ; Unknown + 0x1D507, # .. 0x1D50A ; Common + 0x1D50B, # .. 0x1D50C ; Unknown + 0x1D50D, # .. 0x1D514 ; Common + 0x1D515, # .. 
0x1D515 ; Unknown + 0x1D516, # .. 0x1D51C ; Common + 0x1D51D, # .. 0x1D51D ; Unknown + 0x1D51E, # .. 0x1D539 ; Common + 0x1D53A, # .. 0x1D53A ; Unknown + 0x1D53B, # .. 0x1D53E ; Common + 0x1D53F, # .. 0x1D53F ; Unknown + 0x1D540, # .. 0x1D544 ; Common + 0x1D545, # .. 0x1D545 ; Unknown + 0x1D546, # .. 0x1D546 ; Common + 0x1D547, # .. 0x1D549 ; Unknown + 0x1D54A, # .. 0x1D550 ; Common + 0x1D551, # .. 0x1D551 ; Unknown + 0x1D552, # .. 0x1D6A5 ; Common + 0x1D6A6, # .. 0x1D6A7 ; Unknown + 0x1D6A8, # .. 0x1D7CB ; Common + 0x1D7CC, # .. 0x1D7CD ; Unknown + 0x1D7CE, # .. 0x1D7FF ; Common + 0x1D800, # .. 0x1DA8B ; SignWriting + 0x1DA8C, # .. 0x1DA9A ; Unknown + 0x1DA9B, # .. 0x1DA9F ; SignWriting + 0x1DAA0, # .. 0x1DAA0 ; Unknown + 0x1DAA1, # .. 0x1DAAF ; SignWriting + 0x1DAB0, # .. 0x1DEFF ; Unknown + 0x1DF00, # .. 0x1DF1E ; Latin + 0x1DF1F, # .. 0x1DF24 ; Unknown + 0x1DF25, # .. 0x1DF2A ; Latin + 0x1DF2B, # .. 0x1DFFF ; Unknown + 0x1E000, # .. 0x1E006 ; Glagolitic + 0x1E007, # .. 0x1E007 ; Unknown + 0x1E008, # .. 0x1E018 ; Glagolitic + 0x1E019, # .. 0x1E01A ; Unknown + 0x1E01B, # .. 0x1E021 ; Glagolitic + 0x1E022, # .. 0x1E022 ; Unknown + 0x1E023, # .. 0x1E024 ; Glagolitic + 0x1E025, # .. 0x1E025 ; Unknown + 0x1E026, # .. 0x1E02A ; Glagolitic + 0x1E02B, # .. 0x1E02F ; Unknown + 0x1E030, # .. 0x1E06D ; Cyrillic + 0x1E06E, # .. 0x1E08E ; Unknown + 0x1E08F, # .. 0x1E08F ; Cyrillic + 0x1E090, # .. 0x1E0FF ; Unknown + 0x1E100, # .. 0x1E12C ; Nyiakeng_Puachue_Hmong + 0x1E12D, # .. 0x1E12F ; Unknown + 0x1E130, # .. 0x1E13D ; Nyiakeng_Puachue_Hmong + 0x1E13E, # .. 0x1E13F ; Unknown + 0x1E140, # .. 0x1E149 ; Nyiakeng_Puachue_Hmong + 0x1E14A, # .. 0x1E14D ; Unknown + 0x1E14E, # .. 0x1E14F ; Nyiakeng_Puachue_Hmong + 0x1E150, # .. 0x1E28F ; Unknown + 0x1E290, # .. 0x1E2AE ; Toto + 0x1E2AF, # .. 0x1E2BF ; Unknown + 0x1E2C0, # .. 0x1E2F9 ; Wancho + 0x1E2FA, # .. 0x1E2FE ; Unknown + 0x1E2FF, # .. 0x1E2FF ; Wancho + 0x1E300, # .. 0x1E4CF ; Unknown + 0x1E4D0, # .. 
0x1E4F9 ; Nag_Mundari + 0x1E4FA, # .. 0x1E5CF ; Unknown + 0x1E5D0, # .. 0x1E5FA ; Ol_Onal + 0x1E5FB, # .. 0x1E5FE ; Unknown + 0x1E5FF, # .. 0x1E5FF ; Ol_Onal + 0x1E600, # .. 0x1E7DF ; Unknown + 0x1E7E0, # .. 0x1E7E6 ; Ethiopic + 0x1E7E7, # .. 0x1E7E7 ; Unknown + 0x1E7E8, # .. 0x1E7EB ; Ethiopic + 0x1E7EC, # .. 0x1E7EC ; Unknown + 0x1E7ED, # .. 0x1E7EE ; Ethiopic + 0x1E7EF, # .. 0x1E7EF ; Unknown + 0x1E7F0, # .. 0x1E7FE ; Ethiopic + 0x1E7FF, # .. 0x1E7FF ; Unknown + 0x1E800, # .. 0x1E8C4 ; Mende_Kikakui + 0x1E8C5, # .. 0x1E8C6 ; Unknown + 0x1E8C7, # .. 0x1E8D6 ; Mende_Kikakui + 0x1E8D7, # .. 0x1E8FF ; Unknown + 0x1E900, # .. 0x1E94B ; Adlam + 0x1E94C, # .. 0x1E94F ; Unknown + 0x1E950, # .. 0x1E959 ; Adlam + 0x1E95A, # .. 0x1E95D ; Unknown + 0x1E95E, # .. 0x1E95F ; Adlam + 0x1E960, # .. 0x1EC70 ; Unknown + 0x1EC71, # .. 0x1ECB4 ; Common + 0x1ECB5, # .. 0x1ED00 ; Unknown + 0x1ED01, # .. 0x1ED3D ; Common + 0x1ED3E, # .. 0x1EDFF ; Unknown + 0x1EE00, # .. 0x1EE03 ; Arabic + 0x1EE04, # .. 0x1EE04 ; Unknown + 0x1EE05, # .. 0x1EE1F ; Arabic + 0x1EE20, # .. 0x1EE20 ; Unknown + 0x1EE21, # .. 0x1EE22 ; Arabic + 0x1EE23, # .. 0x1EE23 ; Unknown + 0x1EE24, # .. 0x1EE24 ; Arabic + 0x1EE25, # .. 0x1EE26 ; Unknown + 0x1EE27, # .. 0x1EE27 ; Arabic + 0x1EE28, # .. 0x1EE28 ; Unknown + 0x1EE29, # .. 0x1EE32 ; Arabic + 0x1EE33, # .. 0x1EE33 ; Unknown + 0x1EE34, # .. 0x1EE37 ; Arabic + 0x1EE38, # .. 0x1EE38 ; Unknown + 0x1EE39, # .. 0x1EE39 ; Arabic + 0x1EE3A, # .. 0x1EE3A ; Unknown + 0x1EE3B, # .. 0x1EE3B ; Arabic + 0x1EE3C, # .. 0x1EE41 ; Unknown + 0x1EE42, # .. 0x1EE42 ; Arabic + 0x1EE43, # .. 0x1EE46 ; Unknown + 0x1EE47, # .. 0x1EE47 ; Arabic + 0x1EE48, # .. 0x1EE48 ; Unknown + 0x1EE49, # .. 0x1EE49 ; Arabic + 0x1EE4A, # .. 0x1EE4A ; Unknown + 0x1EE4B, # .. 0x1EE4B ; Arabic + 0x1EE4C, # .. 0x1EE4C ; Unknown + 0x1EE4D, # .. 0x1EE4F ; Arabic + 0x1EE50, # .. 0x1EE50 ; Unknown + 0x1EE51, # .. 0x1EE52 ; Arabic + 0x1EE53, # .. 0x1EE53 ; Unknown + 0x1EE54, # .. 
0x1EE54 ; Arabic + 0x1EE55, # .. 0x1EE56 ; Unknown + 0x1EE57, # .. 0x1EE57 ; Arabic + 0x1EE58, # .. 0x1EE58 ; Unknown + 0x1EE59, # .. 0x1EE59 ; Arabic + 0x1EE5A, # .. 0x1EE5A ; Unknown + 0x1EE5B, # .. 0x1EE5B ; Arabic + 0x1EE5C, # .. 0x1EE5C ; Unknown + 0x1EE5D, # .. 0x1EE5D ; Arabic + 0x1EE5E, # .. 0x1EE5E ; Unknown + 0x1EE5F, # .. 0x1EE5F ; Arabic + 0x1EE60, # .. 0x1EE60 ; Unknown + 0x1EE61, # .. 0x1EE62 ; Arabic + 0x1EE63, # .. 0x1EE63 ; Unknown + 0x1EE64, # .. 0x1EE64 ; Arabic + 0x1EE65, # .. 0x1EE66 ; Unknown + 0x1EE67, # .. 0x1EE6A ; Arabic + 0x1EE6B, # .. 0x1EE6B ; Unknown + 0x1EE6C, # .. 0x1EE72 ; Arabic + 0x1EE73, # .. 0x1EE73 ; Unknown + 0x1EE74, # .. 0x1EE77 ; Arabic + 0x1EE78, # .. 0x1EE78 ; Unknown + 0x1EE79, # .. 0x1EE7C ; Arabic + 0x1EE7D, # .. 0x1EE7D ; Unknown + 0x1EE7E, # .. 0x1EE7E ; Arabic + 0x1EE7F, # .. 0x1EE7F ; Unknown + 0x1EE80, # .. 0x1EE89 ; Arabic + 0x1EE8A, # .. 0x1EE8A ; Unknown + 0x1EE8B, # .. 0x1EE9B ; Arabic + 0x1EE9C, # .. 0x1EEA0 ; Unknown + 0x1EEA1, # .. 0x1EEA3 ; Arabic + 0x1EEA4, # .. 0x1EEA4 ; Unknown + 0x1EEA5, # .. 0x1EEA9 ; Arabic + 0x1EEAA, # .. 0x1EEAA ; Unknown + 0x1EEAB, # .. 0x1EEBB ; Arabic + 0x1EEBC, # .. 0x1EEEF ; Unknown + 0x1EEF0, # .. 0x1EEF1 ; Arabic + 0x1EEF2, # .. 0x1EFFF ; Unknown + 0x1F000, # .. 0x1F02B ; Common + 0x1F02C, # .. 0x1F02F ; Unknown + 0x1F030, # .. 0x1F093 ; Common + 0x1F094, # .. 0x1F09F ; Unknown + 0x1F0A0, # .. 0x1F0AE ; Common + 0x1F0AF, # .. 0x1F0B0 ; Unknown + 0x1F0B1, # .. 0x1F0BF ; Common + 0x1F0C0, # .. 0x1F0C0 ; Unknown + 0x1F0C1, # .. 0x1F0CF ; Common + 0x1F0D0, # .. 0x1F0D0 ; Unknown + 0x1F0D1, # .. 0x1F0F5 ; Common + 0x1F0F6, # .. 0x1F0FF ; Unknown + 0x1F100, # .. 0x1F1AD ; Common + 0x1F1AE, # .. 0x1F1E5 ; Unknown + 0x1F1E6, # .. 0x1F1FF ; Common + 0x1F200, # .. 0x1F200 ; Hiragana + 0x1F201, # .. 0x1F202 ; Common + 0x1F203, # .. 0x1F20F ; Unknown + 0x1F210, # .. 0x1F23B ; Common + 0x1F23C, # .. 0x1F23F ; Unknown + 0x1F240, # .. 0x1F248 ; Common + 0x1F249, # .. 
0x1F24F ; Unknown + 0x1F250, # .. 0x1F251 ; Common + 0x1F252, # .. 0x1F25F ; Unknown + 0x1F260, # .. 0x1F265 ; Common + 0x1F266, # .. 0x1F2FF ; Unknown + 0x1F300, # .. 0x1F6D7 ; Common + 0x1F6D8, # .. 0x1F6DB ; Unknown + 0x1F6DC, # .. 0x1F6EC ; Common + 0x1F6ED, # .. 0x1F6EF ; Unknown + 0x1F6F0, # .. 0x1F6FC ; Common + 0x1F6FD, # .. 0x1F6FF ; Unknown + 0x1F700, # .. 0x1F776 ; Common + 0x1F777, # .. 0x1F77A ; Unknown + 0x1F77B, # .. 0x1F7D9 ; Common + 0x1F7DA, # .. 0x1F7DF ; Unknown + 0x1F7E0, # .. 0x1F7EB ; Common + 0x1F7EC, # .. 0x1F7EF ; Unknown + 0x1F7F0, # .. 0x1F7F0 ; Common + 0x1F7F1, # .. 0x1F7FF ; Unknown + 0x1F800, # .. 0x1F80B ; Common + 0x1F80C, # .. 0x1F80F ; Unknown + 0x1F810, # .. 0x1F847 ; Common + 0x1F848, # .. 0x1F84F ; Unknown + 0x1F850, # .. 0x1F859 ; Common + 0x1F85A, # .. 0x1F85F ; Unknown + 0x1F860, # .. 0x1F887 ; Common + 0x1F888, # .. 0x1F88F ; Unknown + 0x1F890, # .. 0x1F8AD ; Common + 0x1F8AE, # .. 0x1F8AF ; Unknown + 0x1F8B0, # .. 0x1F8BB ; Common + 0x1F8BC, # .. 0x1F8BF ; Unknown + 0x1F8C0, # .. 0x1F8C1 ; Common + 0x1F8C2, # .. 0x1F8FF ; Unknown + 0x1F900, # .. 0x1FA53 ; Common + 0x1FA54, # .. 0x1FA5F ; Unknown + 0x1FA60, # .. 0x1FA6D ; Common + 0x1FA6E, # .. 0x1FA6F ; Unknown + 0x1FA70, # .. 0x1FA7C ; Common + 0x1FA7D, # .. 0x1FA7F ; Unknown + 0x1FA80, # .. 0x1FA89 ; Common + 0x1FA8A, # .. 0x1FA8E ; Unknown + 0x1FA8F, # .. 0x1FAC6 ; Common + 0x1FAC7, # .. 0x1FACD ; Unknown + 0x1FACE, # .. 0x1FADC ; Common + 0x1FADD, # .. 0x1FADE ; Unknown + 0x1FADF, # .. 0x1FAE9 ; Common + 0x1FAEA, # .. 0x1FAEF ; Unknown + 0x1FAF0, # .. 0x1FAF8 ; Common + 0x1FAF9, # .. 0x1FAFF ; Unknown + 0x1FB00, # .. 0x1FB92 ; Common + 0x1FB93, # .. 0x1FB93 ; Unknown + 0x1FB94, # .. 0x1FBF9 ; Common + 0x1FBFA, # .. 0x1FFFF ; Unknown + 0x20000, # .. 0x2A6DF ; Han + 0x2A6E0, # .. 0x2A6FF ; Unknown + 0x2A700, # .. 0x2B739 ; Han + 0x2B73A, # .. 0x2B73F ; Unknown + 0x2B740, # .. 0x2B81D ; Han + 0x2B81E, # .. 0x2B81F ; Unknown + 0x2B820, # .. 0x2CEA1 ; Han + 0x2CEA2, # .. 
0x2CEAF ; Unknown + 0x2CEB0, # .. 0x2EBE0 ; Han + 0x2EBE1, # .. 0x2EBEF ; Unknown + 0x2EBF0, # .. 0x2EE5D ; Han + 0x2EE5E, # .. 0x2F7FF ; Unknown + 0x2F800, # .. 0x2FA1D ; Han + 0x2FA1E, # .. 0x2FFFF ; Unknown + 0x30000, # .. 0x3134A ; Han + 0x3134B, # .. 0x3134F ; Unknown + 0x31350, # .. 0x323AF ; Han + 0x323B0, # .. 0xE0000 ; Unknown + 0xE0001, # .. 0xE0001 ; Common + 0xE0002, # .. 0xE001F ; Unknown + 0xE0020, # .. 0xE007F ; Common + 0xE0080, # .. 0xE00FF ; Unknown + 0xE0100, # .. 0xE01EF ; Inherited + 0xE01F0, # .. 0x10FFFF ; Unknown +] + +VALUES = [ + "Zyyy", # 0000..0040 ; Common + "Latn", # 0041..005A ; Latin + "Zyyy", # 005B..0060 ; Common + "Latn", # 0061..007A ; Latin + "Zyyy", # 007B..00A9 ; Common + "Latn", # 00AA..00AA ; Latin + "Zyyy", # 00AB..00B9 ; Common + "Latn", # 00BA..00BA ; Latin + "Zyyy", # 00BB..00BF ; Common + "Latn", # 00C0..00D6 ; Latin + "Zyyy", # 00D7..00D7 ; Common + "Latn", # 00D8..00F6 ; Latin + "Zyyy", # 00F7..00F7 ; Common + "Latn", # 00F8..02B8 ; Latin + "Zyyy", # 02B9..02DF ; Common + "Latn", # 02E0..02E4 ; Latin + "Zyyy", # 02E5..02E9 ; Common + "Bopo", # 02EA..02EB ; Bopomofo + "Zyyy", # 02EC..02FF ; Common + "Zinh", # 0300..036F ; Inherited + "Grek", # 0370..0373 ; Greek + "Zyyy", # 0374..0374 ; Common + "Grek", # 0375..0377 ; Greek + "Zzzz", # 0378..0379 ; Unknown + "Grek", # 037A..037D ; Greek + "Zyyy", # 037E..037E ; Common + "Grek", # 037F..037F ; Greek + "Zzzz", # 0380..0383 ; Unknown + "Grek", # 0384..0384 ; Greek + "Zyyy", # 0385..0385 ; Common + "Grek", # 0386..0386 ; Greek + "Zyyy", # 0387..0387 ; Common + "Grek", # 0388..038A ; Greek + "Zzzz", # 038B..038B ; Unknown + "Grek", # 038C..038C ; Greek + "Zzzz", # 038D..038D ; Unknown + "Grek", # 038E..03A1 ; Greek + "Zzzz", # 03A2..03A2 ; Unknown + "Grek", # 03A3..03E1 ; Greek + "Copt", # 03E2..03EF ; Coptic + "Grek", # 03F0..03FF ; Greek + "Cyrl", # 0400..0484 ; Cyrillic + "Zinh", # 0485..0486 ; Inherited + "Cyrl", # 0487..052F ; Cyrillic + "Zzzz", # 0530..0530 ; Unknown 
+ "Armn", # 0531..0556 ; Armenian + "Zzzz", # 0557..0558 ; Unknown + "Armn", # 0559..058A ; Armenian + "Zzzz", # 058B..058C ; Unknown + "Armn", # 058D..058F ; Armenian + "Zzzz", # 0590..0590 ; Unknown + "Hebr", # 0591..05C7 ; Hebrew + "Zzzz", # 05C8..05CF ; Unknown + "Hebr", # 05D0..05EA ; Hebrew + "Zzzz", # 05EB..05EE ; Unknown + "Hebr", # 05EF..05F4 ; Hebrew + "Zzzz", # 05F5..05FF ; Unknown + "Arab", # 0600..0604 ; Arabic + "Zyyy", # 0605..0605 ; Common + "Arab", # 0606..060B ; Arabic + "Zyyy", # 060C..060C ; Common + "Arab", # 060D..061A ; Arabic + "Zyyy", # 061B..061B ; Common + "Arab", # 061C..061E ; Arabic + "Zyyy", # 061F..061F ; Common + "Arab", # 0620..063F ; Arabic + "Zyyy", # 0640..0640 ; Common + "Arab", # 0641..064A ; Arabic + "Zinh", # 064B..0655 ; Inherited + "Arab", # 0656..066F ; Arabic + "Zinh", # 0670..0670 ; Inherited + "Arab", # 0671..06DC ; Arabic + "Zyyy", # 06DD..06DD ; Common + "Arab", # 06DE..06FF ; Arabic + "Syrc", # 0700..070D ; Syriac + "Zzzz", # 070E..070E ; Unknown + "Syrc", # 070F..074A ; Syriac + "Zzzz", # 074B..074C ; Unknown + "Syrc", # 074D..074F ; Syriac + "Arab", # 0750..077F ; Arabic + "Thaa", # 0780..07B1 ; Thaana + "Zzzz", # 07B2..07BF ; Unknown + "Nkoo", # 07C0..07FA ; Nko + "Zzzz", # 07FB..07FC ; Unknown + "Nkoo", # 07FD..07FF ; Nko + "Samr", # 0800..082D ; Samaritan + "Zzzz", # 082E..082F ; Unknown + "Samr", # 0830..083E ; Samaritan + "Zzzz", # 083F..083F ; Unknown + "Mand", # 0840..085B ; Mandaic + "Zzzz", # 085C..085D ; Unknown + "Mand", # 085E..085E ; Mandaic + "Zzzz", # 085F..085F ; Unknown + "Syrc", # 0860..086A ; Syriac + "Zzzz", # 086B..086F ; Unknown + "Arab", # 0870..088E ; Arabic + "Zzzz", # 088F..088F ; Unknown + "Arab", # 0890..0891 ; Arabic + "Zzzz", # 0892..0896 ; Unknown + "Arab", # 0897..08E1 ; Arabic + "Zyyy", # 08E2..08E2 ; Common + "Arab", # 08E3..08FF ; Arabic + "Deva", # 0900..0950 ; Devanagari + "Zinh", # 0951..0954 ; Inherited + "Deva", # 0955..0963 ; Devanagari + "Zyyy", # 0964..0965 ; Common + 
"Deva", # 0966..097F ; Devanagari + "Beng", # 0980..0983 ; Bengali + "Zzzz", # 0984..0984 ; Unknown + "Beng", # 0985..098C ; Bengali + "Zzzz", # 098D..098E ; Unknown + "Beng", # 098F..0990 ; Bengali + "Zzzz", # 0991..0992 ; Unknown + "Beng", # 0993..09A8 ; Bengali + "Zzzz", # 09A9..09A9 ; Unknown + "Beng", # 09AA..09B0 ; Bengali + "Zzzz", # 09B1..09B1 ; Unknown + "Beng", # 09B2..09B2 ; Bengali + "Zzzz", # 09B3..09B5 ; Unknown + "Beng", # 09B6..09B9 ; Bengali + "Zzzz", # 09BA..09BB ; Unknown + "Beng", # 09BC..09C4 ; Bengali + "Zzzz", # 09C5..09C6 ; Unknown + "Beng", # 09C7..09C8 ; Bengali + "Zzzz", # 09C9..09CA ; Unknown + "Beng", # 09CB..09CE ; Bengali + "Zzzz", # 09CF..09D6 ; Unknown + "Beng", # 09D7..09D7 ; Bengali + "Zzzz", # 09D8..09DB ; Unknown + "Beng", # 09DC..09DD ; Bengali + "Zzzz", # 09DE..09DE ; Unknown + "Beng", # 09DF..09E3 ; Bengali + "Zzzz", # 09E4..09E5 ; Unknown + "Beng", # 09E6..09FE ; Bengali + "Zzzz", # 09FF..0A00 ; Unknown + "Guru", # 0A01..0A03 ; Gurmukhi + "Zzzz", # 0A04..0A04 ; Unknown + "Guru", # 0A05..0A0A ; Gurmukhi + "Zzzz", # 0A0B..0A0E ; Unknown + "Guru", # 0A0F..0A10 ; Gurmukhi + "Zzzz", # 0A11..0A12 ; Unknown + "Guru", # 0A13..0A28 ; Gurmukhi + "Zzzz", # 0A29..0A29 ; Unknown + "Guru", # 0A2A..0A30 ; Gurmukhi + "Zzzz", # 0A31..0A31 ; Unknown + "Guru", # 0A32..0A33 ; Gurmukhi + "Zzzz", # 0A34..0A34 ; Unknown + "Guru", # 0A35..0A36 ; Gurmukhi + "Zzzz", # 0A37..0A37 ; Unknown + "Guru", # 0A38..0A39 ; Gurmukhi + "Zzzz", # 0A3A..0A3B ; Unknown + "Guru", # 0A3C..0A3C ; Gurmukhi + "Zzzz", # 0A3D..0A3D ; Unknown + "Guru", # 0A3E..0A42 ; Gurmukhi + "Zzzz", # 0A43..0A46 ; Unknown + "Guru", # 0A47..0A48 ; Gurmukhi + "Zzzz", # 0A49..0A4A ; Unknown + "Guru", # 0A4B..0A4D ; Gurmukhi + "Zzzz", # 0A4E..0A50 ; Unknown + "Guru", # 0A51..0A51 ; Gurmukhi + "Zzzz", # 0A52..0A58 ; Unknown + "Guru", # 0A59..0A5C ; Gurmukhi + "Zzzz", # 0A5D..0A5D ; Unknown + "Guru", # 0A5E..0A5E ; Gurmukhi + "Zzzz", # 0A5F..0A65 ; Unknown + "Guru", # 0A66..0A76 ; Gurmukhi + 
"Zzzz", # 0A77..0A80 ; Unknown + "Gujr", # 0A81..0A83 ; Gujarati + "Zzzz", # 0A84..0A84 ; Unknown + "Gujr", # 0A85..0A8D ; Gujarati + "Zzzz", # 0A8E..0A8E ; Unknown + "Gujr", # 0A8F..0A91 ; Gujarati + "Zzzz", # 0A92..0A92 ; Unknown + "Gujr", # 0A93..0AA8 ; Gujarati + "Zzzz", # 0AA9..0AA9 ; Unknown + "Gujr", # 0AAA..0AB0 ; Gujarati + "Zzzz", # 0AB1..0AB1 ; Unknown + "Gujr", # 0AB2..0AB3 ; Gujarati + "Zzzz", # 0AB4..0AB4 ; Unknown + "Gujr", # 0AB5..0AB9 ; Gujarati + "Zzzz", # 0ABA..0ABB ; Unknown + "Gujr", # 0ABC..0AC5 ; Gujarati + "Zzzz", # 0AC6..0AC6 ; Unknown + "Gujr", # 0AC7..0AC9 ; Gujarati + "Zzzz", # 0ACA..0ACA ; Unknown + "Gujr", # 0ACB..0ACD ; Gujarati + "Zzzz", # 0ACE..0ACF ; Unknown + "Gujr", # 0AD0..0AD0 ; Gujarati + "Zzzz", # 0AD1..0ADF ; Unknown + "Gujr", # 0AE0..0AE3 ; Gujarati + "Zzzz", # 0AE4..0AE5 ; Unknown + "Gujr", # 0AE6..0AF1 ; Gujarati + "Zzzz", # 0AF2..0AF8 ; Unknown + "Gujr", # 0AF9..0AFF ; Gujarati + "Zzzz", # 0B00..0B00 ; Unknown + "Orya", # 0B01..0B03 ; Oriya + "Zzzz", # 0B04..0B04 ; Unknown + "Orya", # 0B05..0B0C ; Oriya + "Zzzz", # 0B0D..0B0E ; Unknown + "Orya", # 0B0F..0B10 ; Oriya + "Zzzz", # 0B11..0B12 ; Unknown + "Orya", # 0B13..0B28 ; Oriya + "Zzzz", # 0B29..0B29 ; Unknown + "Orya", # 0B2A..0B30 ; Oriya + "Zzzz", # 0B31..0B31 ; Unknown + "Orya", # 0B32..0B33 ; Oriya + "Zzzz", # 0B34..0B34 ; Unknown + "Orya", # 0B35..0B39 ; Oriya + "Zzzz", # 0B3A..0B3B ; Unknown + "Orya", # 0B3C..0B44 ; Oriya + "Zzzz", # 0B45..0B46 ; Unknown + "Orya", # 0B47..0B48 ; Oriya + "Zzzz", # 0B49..0B4A ; Unknown + "Orya", # 0B4B..0B4D ; Oriya + "Zzzz", # 0B4E..0B54 ; Unknown + "Orya", # 0B55..0B57 ; Oriya + "Zzzz", # 0B58..0B5B ; Unknown + "Orya", # 0B5C..0B5D ; Oriya + "Zzzz", # 0B5E..0B5E ; Unknown + "Orya", # 0B5F..0B63 ; Oriya + "Zzzz", # 0B64..0B65 ; Unknown + "Orya", # 0B66..0B77 ; Oriya + "Zzzz", # 0B78..0B81 ; Unknown + "Taml", # 0B82..0B83 ; Tamil + "Zzzz", # 0B84..0B84 ; Unknown + "Taml", # 0B85..0B8A ; Tamil + "Zzzz", # 0B8B..0B8D ; Unknown + 
"Taml", # 0B8E..0B90 ; Tamil + "Zzzz", # 0B91..0B91 ; Unknown + "Taml", # 0B92..0B95 ; Tamil + "Zzzz", # 0B96..0B98 ; Unknown + "Taml", # 0B99..0B9A ; Tamil + "Zzzz", # 0B9B..0B9B ; Unknown + "Taml", # 0B9C..0B9C ; Tamil + "Zzzz", # 0B9D..0B9D ; Unknown + "Taml", # 0B9E..0B9F ; Tamil + "Zzzz", # 0BA0..0BA2 ; Unknown + "Taml", # 0BA3..0BA4 ; Tamil + "Zzzz", # 0BA5..0BA7 ; Unknown + "Taml", # 0BA8..0BAA ; Tamil + "Zzzz", # 0BAB..0BAD ; Unknown + "Taml", # 0BAE..0BB9 ; Tamil + "Zzzz", # 0BBA..0BBD ; Unknown + "Taml", # 0BBE..0BC2 ; Tamil + "Zzzz", # 0BC3..0BC5 ; Unknown + "Taml", # 0BC6..0BC8 ; Tamil + "Zzzz", # 0BC9..0BC9 ; Unknown + "Taml", # 0BCA..0BCD ; Tamil + "Zzzz", # 0BCE..0BCF ; Unknown + "Taml", # 0BD0..0BD0 ; Tamil + "Zzzz", # 0BD1..0BD6 ; Unknown + "Taml", # 0BD7..0BD7 ; Tamil + "Zzzz", # 0BD8..0BE5 ; Unknown + "Taml", # 0BE6..0BFA ; Tamil + "Zzzz", # 0BFB..0BFF ; Unknown + "Telu", # 0C00..0C0C ; Telugu + "Zzzz", # 0C0D..0C0D ; Unknown + "Telu", # 0C0E..0C10 ; Telugu + "Zzzz", # 0C11..0C11 ; Unknown + "Telu", # 0C12..0C28 ; Telugu + "Zzzz", # 0C29..0C29 ; Unknown + "Telu", # 0C2A..0C39 ; Telugu + "Zzzz", # 0C3A..0C3B ; Unknown + "Telu", # 0C3C..0C44 ; Telugu + "Zzzz", # 0C45..0C45 ; Unknown + "Telu", # 0C46..0C48 ; Telugu + "Zzzz", # 0C49..0C49 ; Unknown + "Telu", # 0C4A..0C4D ; Telugu + "Zzzz", # 0C4E..0C54 ; Unknown + "Telu", # 0C55..0C56 ; Telugu + "Zzzz", # 0C57..0C57 ; Unknown + "Telu", # 0C58..0C5A ; Telugu + "Zzzz", # 0C5B..0C5C ; Unknown + "Telu", # 0C5D..0C5D ; Telugu + "Zzzz", # 0C5E..0C5F ; Unknown + "Telu", # 0C60..0C63 ; Telugu + "Zzzz", # 0C64..0C65 ; Unknown + "Telu", # 0C66..0C6F ; Telugu + "Zzzz", # 0C70..0C76 ; Unknown + "Telu", # 0C77..0C7F ; Telugu + "Knda", # 0C80..0C8C ; Kannada + "Zzzz", # 0C8D..0C8D ; Unknown + "Knda", # 0C8E..0C90 ; Kannada + "Zzzz", # 0C91..0C91 ; Unknown + "Knda", # 0C92..0CA8 ; Kannada + "Zzzz", # 0CA9..0CA9 ; Unknown + "Knda", # 0CAA..0CB3 ; Kannada + "Zzzz", # 0CB4..0CB4 ; Unknown + "Knda", # 0CB5..0CB9 ; 
Kannada + "Zzzz", # 0CBA..0CBB ; Unknown + "Knda", # 0CBC..0CC4 ; Kannada + "Zzzz", # 0CC5..0CC5 ; Unknown + "Knda", # 0CC6..0CC8 ; Kannada + "Zzzz", # 0CC9..0CC9 ; Unknown + "Knda", # 0CCA..0CCD ; Kannada + "Zzzz", # 0CCE..0CD4 ; Unknown + "Knda", # 0CD5..0CD6 ; Kannada + "Zzzz", # 0CD7..0CDC ; Unknown + "Knda", # 0CDD..0CDE ; Kannada + "Zzzz", # 0CDF..0CDF ; Unknown + "Knda", # 0CE0..0CE3 ; Kannada + "Zzzz", # 0CE4..0CE5 ; Unknown + "Knda", # 0CE6..0CEF ; Kannada + "Zzzz", # 0CF0..0CF0 ; Unknown + "Knda", # 0CF1..0CF3 ; Kannada + "Zzzz", # 0CF4..0CFF ; Unknown + "Mlym", # 0D00..0D0C ; Malayalam + "Zzzz", # 0D0D..0D0D ; Unknown + "Mlym", # 0D0E..0D10 ; Malayalam + "Zzzz", # 0D11..0D11 ; Unknown + "Mlym", # 0D12..0D44 ; Malayalam + "Zzzz", # 0D45..0D45 ; Unknown + "Mlym", # 0D46..0D48 ; Malayalam + "Zzzz", # 0D49..0D49 ; Unknown + "Mlym", # 0D4A..0D4F ; Malayalam + "Zzzz", # 0D50..0D53 ; Unknown + "Mlym", # 0D54..0D63 ; Malayalam + "Zzzz", # 0D64..0D65 ; Unknown + "Mlym", # 0D66..0D7F ; Malayalam + "Zzzz", # 0D80..0D80 ; Unknown + "Sinh", # 0D81..0D83 ; Sinhala + "Zzzz", # 0D84..0D84 ; Unknown + "Sinh", # 0D85..0D96 ; Sinhala + "Zzzz", # 0D97..0D99 ; Unknown + "Sinh", # 0D9A..0DB1 ; Sinhala + "Zzzz", # 0DB2..0DB2 ; Unknown + "Sinh", # 0DB3..0DBB ; Sinhala + "Zzzz", # 0DBC..0DBC ; Unknown + "Sinh", # 0DBD..0DBD ; Sinhala + "Zzzz", # 0DBE..0DBF ; Unknown + "Sinh", # 0DC0..0DC6 ; Sinhala + "Zzzz", # 0DC7..0DC9 ; Unknown + "Sinh", # 0DCA..0DCA ; Sinhala + "Zzzz", # 0DCB..0DCE ; Unknown + "Sinh", # 0DCF..0DD4 ; Sinhala + "Zzzz", # 0DD5..0DD5 ; Unknown + "Sinh", # 0DD6..0DD6 ; Sinhala + "Zzzz", # 0DD7..0DD7 ; Unknown + "Sinh", # 0DD8..0DDF ; Sinhala + "Zzzz", # 0DE0..0DE5 ; Unknown + "Sinh", # 0DE6..0DEF ; Sinhala + "Zzzz", # 0DF0..0DF1 ; Unknown + "Sinh", # 0DF2..0DF4 ; Sinhala + "Zzzz", # 0DF5..0E00 ; Unknown + "Thai", # 0E01..0E3A ; Thai + "Zzzz", # 0E3B..0E3E ; Unknown + "Zyyy", # 0E3F..0E3F ; Common + "Thai", # 0E40..0E5B ; Thai + "Zzzz", # 0E5C..0E80 ; Unknown + 
"Laoo", # 0E81..0E82 ; Lao + "Zzzz", # 0E83..0E83 ; Unknown + "Laoo", # 0E84..0E84 ; Lao + "Zzzz", # 0E85..0E85 ; Unknown + "Laoo", # 0E86..0E8A ; Lao + "Zzzz", # 0E8B..0E8B ; Unknown + "Laoo", # 0E8C..0EA3 ; Lao + "Zzzz", # 0EA4..0EA4 ; Unknown + "Laoo", # 0EA5..0EA5 ; Lao + "Zzzz", # 0EA6..0EA6 ; Unknown + "Laoo", # 0EA7..0EBD ; Lao + "Zzzz", # 0EBE..0EBF ; Unknown + "Laoo", # 0EC0..0EC4 ; Lao + "Zzzz", # 0EC5..0EC5 ; Unknown + "Laoo", # 0EC6..0EC6 ; Lao + "Zzzz", # 0EC7..0EC7 ; Unknown + "Laoo", # 0EC8..0ECE ; Lao + "Zzzz", # 0ECF..0ECF ; Unknown + "Laoo", # 0ED0..0ED9 ; Lao + "Zzzz", # 0EDA..0EDB ; Unknown + "Laoo", # 0EDC..0EDF ; Lao + "Zzzz", # 0EE0..0EFF ; Unknown + "Tibt", # 0F00..0F47 ; Tibetan + "Zzzz", # 0F48..0F48 ; Unknown + "Tibt", # 0F49..0F6C ; Tibetan + "Zzzz", # 0F6D..0F70 ; Unknown + "Tibt", # 0F71..0F97 ; Tibetan + "Zzzz", # 0F98..0F98 ; Unknown + "Tibt", # 0F99..0FBC ; Tibetan + "Zzzz", # 0FBD..0FBD ; Unknown + "Tibt", # 0FBE..0FCC ; Tibetan + "Zzzz", # 0FCD..0FCD ; Unknown + "Tibt", # 0FCE..0FD4 ; Tibetan + "Zyyy", # 0FD5..0FD8 ; Common + "Tibt", # 0FD9..0FDA ; Tibetan + "Zzzz", # 0FDB..0FFF ; Unknown + "Mymr", # 1000..109F ; Myanmar + "Geor", # 10A0..10C5 ; Georgian + "Zzzz", # 10C6..10C6 ; Unknown + "Geor", # 10C7..10C7 ; Georgian + "Zzzz", # 10C8..10CC ; Unknown + "Geor", # 10CD..10CD ; Georgian + "Zzzz", # 10CE..10CF ; Unknown + "Geor", # 10D0..10FA ; Georgian + "Zyyy", # 10FB..10FB ; Common + "Geor", # 10FC..10FF ; Georgian + "Hang", # 1100..11FF ; Hangul + "Ethi", # 1200..1248 ; Ethiopic + "Zzzz", # 1249..1249 ; Unknown + "Ethi", # 124A..124D ; Ethiopic + "Zzzz", # 124E..124F ; Unknown + "Ethi", # 1250..1256 ; Ethiopic + "Zzzz", # 1257..1257 ; Unknown + "Ethi", # 1258..1258 ; Ethiopic + "Zzzz", # 1259..1259 ; Unknown + "Ethi", # 125A..125D ; Ethiopic + "Zzzz", # 125E..125F ; Unknown + "Ethi", # 1260..1288 ; Ethiopic + "Zzzz", # 1289..1289 ; Unknown + "Ethi", # 128A..128D ; Ethiopic + "Zzzz", # 128E..128F ; Unknown + "Ethi", # 1290..12B0 
; Ethiopic + "Zzzz", # 12B1..12B1 ; Unknown + "Ethi", # 12B2..12B5 ; Ethiopic + "Zzzz", # 12B6..12B7 ; Unknown + "Ethi", # 12B8..12BE ; Ethiopic + "Zzzz", # 12BF..12BF ; Unknown + "Ethi", # 12C0..12C0 ; Ethiopic + "Zzzz", # 12C1..12C1 ; Unknown + "Ethi", # 12C2..12C5 ; Ethiopic + "Zzzz", # 12C6..12C7 ; Unknown + "Ethi", # 12C8..12D6 ; Ethiopic + "Zzzz", # 12D7..12D7 ; Unknown + "Ethi", # 12D8..1310 ; Ethiopic + "Zzzz", # 1311..1311 ; Unknown + "Ethi", # 1312..1315 ; Ethiopic + "Zzzz", # 1316..1317 ; Unknown + "Ethi", # 1318..135A ; Ethiopic + "Zzzz", # 135B..135C ; Unknown + "Ethi", # 135D..137C ; Ethiopic + "Zzzz", # 137D..137F ; Unknown + "Ethi", # 1380..1399 ; Ethiopic + "Zzzz", # 139A..139F ; Unknown + "Cher", # 13A0..13F5 ; Cherokee + "Zzzz", # 13F6..13F7 ; Unknown + "Cher", # 13F8..13FD ; Cherokee + "Zzzz", # 13FE..13FF ; Unknown + "Cans", # 1400..167F ; Canadian_Aboriginal + "Ogam", # 1680..169C ; Ogham + "Zzzz", # 169D..169F ; Unknown + "Runr", # 16A0..16EA ; Runic + "Zyyy", # 16EB..16ED ; Common + "Runr", # 16EE..16F8 ; Runic + "Zzzz", # 16F9..16FF ; Unknown + "Tglg", # 1700..1715 ; Tagalog + "Zzzz", # 1716..171E ; Unknown + "Tglg", # 171F..171F ; Tagalog + "Hano", # 1720..1734 ; Hanunoo + "Zyyy", # 1735..1736 ; Common + "Zzzz", # 1737..173F ; Unknown + "Buhd", # 1740..1753 ; Buhid + "Zzzz", # 1754..175F ; Unknown + "Tagb", # 1760..176C ; Tagbanwa + "Zzzz", # 176D..176D ; Unknown + "Tagb", # 176E..1770 ; Tagbanwa + "Zzzz", # 1771..1771 ; Unknown + "Tagb", # 1772..1773 ; Tagbanwa + "Zzzz", # 1774..177F ; Unknown + "Khmr", # 1780..17DD ; Khmer + "Zzzz", # 17DE..17DF ; Unknown + "Khmr", # 17E0..17E9 ; Khmer + "Zzzz", # 17EA..17EF ; Unknown + "Khmr", # 17F0..17F9 ; Khmer + "Zzzz", # 17FA..17FF ; Unknown + "Mong", # 1800..1801 ; Mongolian + "Zyyy", # 1802..1803 ; Common + "Mong", # 1804..1804 ; Mongolian + "Zyyy", # 1805..1805 ; Common + "Mong", # 1806..1819 ; Mongolian + "Zzzz", # 181A..181F ; Unknown + "Mong", # 1820..1878 ; Mongolian + "Zzzz", # 1879..187F ; 
Unknown + "Mong", # 1880..18AA ; Mongolian + "Zzzz", # 18AB..18AF ; Unknown + "Cans", # 18B0..18F5 ; Canadian_Aboriginal + "Zzzz", # 18F6..18FF ; Unknown + "Limb", # 1900..191E ; Limbu + "Zzzz", # 191F..191F ; Unknown + "Limb", # 1920..192B ; Limbu + "Zzzz", # 192C..192F ; Unknown + "Limb", # 1930..193B ; Limbu + "Zzzz", # 193C..193F ; Unknown + "Limb", # 1940..1940 ; Limbu + "Zzzz", # 1941..1943 ; Unknown + "Limb", # 1944..194F ; Limbu + "Tale", # 1950..196D ; Tai_Le + "Zzzz", # 196E..196F ; Unknown + "Tale", # 1970..1974 ; Tai_Le + "Zzzz", # 1975..197F ; Unknown + "Talu", # 1980..19AB ; New_Tai_Lue + "Zzzz", # 19AC..19AF ; Unknown + "Talu", # 19B0..19C9 ; New_Tai_Lue + "Zzzz", # 19CA..19CF ; Unknown + "Talu", # 19D0..19DA ; New_Tai_Lue + "Zzzz", # 19DB..19DD ; Unknown + "Talu", # 19DE..19DF ; New_Tai_Lue + "Khmr", # 19E0..19FF ; Khmer + "Bugi", # 1A00..1A1B ; Buginese + "Zzzz", # 1A1C..1A1D ; Unknown + "Bugi", # 1A1E..1A1F ; Buginese + "Lana", # 1A20..1A5E ; Tai_Tham + "Zzzz", # 1A5F..1A5F ; Unknown + "Lana", # 1A60..1A7C ; Tai_Tham + "Zzzz", # 1A7D..1A7E ; Unknown + "Lana", # 1A7F..1A89 ; Tai_Tham + "Zzzz", # 1A8A..1A8F ; Unknown + "Lana", # 1A90..1A99 ; Tai_Tham + "Zzzz", # 1A9A..1A9F ; Unknown + "Lana", # 1AA0..1AAD ; Tai_Tham + "Zzzz", # 1AAE..1AAF ; Unknown + "Zinh", # 1AB0..1ACE ; Inherited + "Zzzz", # 1ACF..1AFF ; Unknown + "Bali", # 1B00..1B4C ; Balinese + "Zzzz", # 1B4D..1B4D ; Unknown + "Bali", # 1B4E..1B7F ; Balinese + "Sund", # 1B80..1BBF ; Sundanese + "Batk", # 1BC0..1BF3 ; Batak + "Zzzz", # 1BF4..1BFB ; Unknown + "Batk", # 1BFC..1BFF ; Batak + "Lepc", # 1C00..1C37 ; Lepcha + "Zzzz", # 1C38..1C3A ; Unknown + "Lepc", # 1C3B..1C49 ; Lepcha + "Zzzz", # 1C4A..1C4C ; Unknown + "Lepc", # 1C4D..1C4F ; Lepcha + "Olck", # 1C50..1C7F ; Ol_Chiki + "Cyrl", # 1C80..1C8A ; Cyrillic + "Zzzz", # 1C8B..1C8F ; Unknown + "Geor", # 1C90..1CBA ; Georgian + "Zzzz", # 1CBB..1CBC ; Unknown + "Geor", # 1CBD..1CBF ; Georgian + "Sund", # 1CC0..1CC7 ; Sundanese + "Zzzz", # 
1CC8..1CCF ; Unknown + "Zinh", # 1CD0..1CD2 ; Inherited + "Zyyy", # 1CD3..1CD3 ; Common + "Zinh", # 1CD4..1CE0 ; Inherited + "Zyyy", # 1CE1..1CE1 ; Common + "Zinh", # 1CE2..1CE8 ; Inherited + "Zyyy", # 1CE9..1CEC ; Common + "Zinh", # 1CED..1CED ; Inherited + "Zyyy", # 1CEE..1CF3 ; Common + "Zinh", # 1CF4..1CF4 ; Inherited + "Zyyy", # 1CF5..1CF7 ; Common + "Zinh", # 1CF8..1CF9 ; Inherited + "Zyyy", # 1CFA..1CFA ; Common + "Zzzz", # 1CFB..1CFF ; Unknown + "Latn", # 1D00..1D25 ; Latin + "Grek", # 1D26..1D2A ; Greek + "Cyrl", # 1D2B..1D2B ; Cyrillic + "Latn", # 1D2C..1D5C ; Latin + "Grek", # 1D5D..1D61 ; Greek + "Latn", # 1D62..1D65 ; Latin + "Grek", # 1D66..1D6A ; Greek + "Latn", # 1D6B..1D77 ; Latin + "Cyrl", # 1D78..1D78 ; Cyrillic + "Latn", # 1D79..1DBE ; Latin + "Grek", # 1DBF..1DBF ; Greek + "Zinh", # 1DC0..1DFF ; Inherited + "Latn", # 1E00..1EFF ; Latin + "Grek", # 1F00..1F15 ; Greek + "Zzzz", # 1F16..1F17 ; Unknown + "Grek", # 1F18..1F1D ; Greek + "Zzzz", # 1F1E..1F1F ; Unknown + "Grek", # 1F20..1F45 ; Greek + "Zzzz", # 1F46..1F47 ; Unknown + "Grek", # 1F48..1F4D ; Greek + "Zzzz", # 1F4E..1F4F ; Unknown + "Grek", # 1F50..1F57 ; Greek + "Zzzz", # 1F58..1F58 ; Unknown + "Grek", # 1F59..1F59 ; Greek + "Zzzz", # 1F5A..1F5A ; Unknown + "Grek", # 1F5B..1F5B ; Greek + "Zzzz", # 1F5C..1F5C ; Unknown + "Grek", # 1F5D..1F5D ; Greek + "Zzzz", # 1F5E..1F5E ; Unknown + "Grek", # 1F5F..1F7D ; Greek + "Zzzz", # 1F7E..1F7F ; Unknown + "Grek", # 1F80..1FB4 ; Greek + "Zzzz", # 1FB5..1FB5 ; Unknown + "Grek", # 1FB6..1FC4 ; Greek + "Zzzz", # 1FC5..1FC5 ; Unknown + "Grek", # 1FC6..1FD3 ; Greek + "Zzzz", # 1FD4..1FD5 ; Unknown + "Grek", # 1FD6..1FDB ; Greek + "Zzzz", # 1FDC..1FDC ; Unknown + "Grek", # 1FDD..1FEF ; Greek + "Zzzz", # 1FF0..1FF1 ; Unknown + "Grek", # 1FF2..1FF4 ; Greek + "Zzzz", # 1FF5..1FF5 ; Unknown + "Grek", # 1FF6..1FFE ; Greek + "Zzzz", # 1FFF..1FFF ; Unknown + "Zyyy", # 2000..200B ; Common + "Zinh", # 200C..200D ; Inherited + "Zyyy", # 200E..2064 ; Common + 
"Zzzz", # 2065..2065 ; Unknown + "Zyyy", # 2066..2070 ; Common + "Latn", # 2071..2071 ; Latin + "Zzzz", # 2072..2073 ; Unknown + "Zyyy", # 2074..207E ; Common + "Latn", # 207F..207F ; Latin + "Zyyy", # 2080..208E ; Common + "Zzzz", # 208F..208F ; Unknown + "Latn", # 2090..209C ; Latin + "Zzzz", # 209D..209F ; Unknown + "Zyyy", # 20A0..20C0 ; Common + "Zzzz", # 20C1..20CF ; Unknown + "Zinh", # 20D0..20F0 ; Inherited + "Zzzz", # 20F1..20FF ; Unknown + "Zyyy", # 2100..2125 ; Common + "Grek", # 2126..2126 ; Greek + "Zyyy", # 2127..2129 ; Common + "Latn", # 212A..212B ; Latin + "Zyyy", # 212C..2131 ; Common + "Latn", # 2132..2132 ; Latin + "Zyyy", # 2133..214D ; Common + "Latn", # 214E..214E ; Latin + "Zyyy", # 214F..215F ; Common + "Latn", # 2160..2188 ; Latin + "Zyyy", # 2189..218B ; Common + "Zzzz", # 218C..218F ; Unknown + "Zyyy", # 2190..2429 ; Common + "Zzzz", # 242A..243F ; Unknown + "Zyyy", # 2440..244A ; Common + "Zzzz", # 244B..245F ; Unknown + "Zyyy", # 2460..27FF ; Common + "Brai", # 2800..28FF ; Braille + "Zyyy", # 2900..2B73 ; Common + "Zzzz", # 2B74..2B75 ; Unknown + "Zyyy", # 2B76..2B95 ; Common + "Zzzz", # 2B96..2B96 ; Unknown + "Zyyy", # 2B97..2BFF ; Common + "Glag", # 2C00..2C5F ; Glagolitic + "Latn", # 2C60..2C7F ; Latin + "Copt", # 2C80..2CF3 ; Coptic + "Zzzz", # 2CF4..2CF8 ; Unknown + "Copt", # 2CF9..2CFF ; Coptic + "Geor", # 2D00..2D25 ; Georgian + "Zzzz", # 2D26..2D26 ; Unknown + "Geor", # 2D27..2D27 ; Georgian + "Zzzz", # 2D28..2D2C ; Unknown + "Geor", # 2D2D..2D2D ; Georgian + "Zzzz", # 2D2E..2D2F ; Unknown + "Tfng", # 2D30..2D67 ; Tifinagh + "Zzzz", # 2D68..2D6E ; Unknown + "Tfng", # 2D6F..2D70 ; Tifinagh + "Zzzz", # 2D71..2D7E ; Unknown + "Tfng", # 2D7F..2D7F ; Tifinagh + "Ethi", # 2D80..2D96 ; Ethiopic + "Zzzz", # 2D97..2D9F ; Unknown + "Ethi", # 2DA0..2DA6 ; Ethiopic + "Zzzz", # 2DA7..2DA7 ; Unknown + "Ethi", # 2DA8..2DAE ; Ethiopic + "Zzzz", # 2DAF..2DAF ; Unknown + "Ethi", # 2DB0..2DB6 ; Ethiopic + "Zzzz", # 2DB7..2DB7 ; Unknown + "Ethi", 
# 2DB8..2DBE ; Ethiopic + "Zzzz", # 2DBF..2DBF ; Unknown + "Ethi", # 2DC0..2DC6 ; Ethiopic + "Zzzz", # 2DC7..2DC7 ; Unknown + "Ethi", # 2DC8..2DCE ; Ethiopic + "Zzzz", # 2DCF..2DCF ; Unknown + "Ethi", # 2DD0..2DD6 ; Ethiopic + "Zzzz", # 2DD7..2DD7 ; Unknown + "Ethi", # 2DD8..2DDE ; Ethiopic + "Zzzz", # 2DDF..2DDF ; Unknown + "Cyrl", # 2DE0..2DFF ; Cyrillic + "Zyyy", # 2E00..2E5D ; Common + "Zzzz", # 2E5E..2E7F ; Unknown + "Hani", # 2E80..2E99 ; Han + "Zzzz", # 2E9A..2E9A ; Unknown + "Hani", # 2E9B..2EF3 ; Han + "Zzzz", # 2EF4..2EFF ; Unknown + "Hani", # 2F00..2FD5 ; Han + "Zzzz", # 2FD6..2FEF ; Unknown + "Zyyy", # 2FF0..3004 ; Common + "Hani", # 3005..3005 ; Han + "Zyyy", # 3006..3006 ; Common + "Hani", # 3007..3007 ; Han + "Zyyy", # 3008..3020 ; Common + "Hani", # 3021..3029 ; Han + "Zinh", # 302A..302D ; Inherited + "Hang", # 302E..302F ; Hangul + "Zyyy", # 3030..3037 ; Common + "Hani", # 3038..303B ; Han + "Zyyy", # 303C..303F ; Common + "Zzzz", # 3040..3040 ; Unknown + "Hira", # 3041..3096 ; Hiragana + "Zzzz", # 3097..3098 ; Unknown + "Zinh", # 3099..309A ; Inherited + "Zyyy", # 309B..309C ; Common + "Hira", # 309D..309F ; Hiragana + "Zyyy", # 30A0..30A0 ; Common + "Kana", # 30A1..30FA ; Katakana + "Zyyy", # 30FB..30FC ; Common + "Kana", # 30FD..30FF ; Katakana + "Zzzz", # 3100..3104 ; Unknown + "Bopo", # 3105..312F ; Bopomofo + "Zzzz", # 3130..3130 ; Unknown + "Hang", # 3131..318E ; Hangul + "Zzzz", # 318F..318F ; Unknown + "Zyyy", # 3190..319F ; Common + "Bopo", # 31A0..31BF ; Bopomofo + "Zyyy", # 31C0..31E5 ; Common + "Zzzz", # 31E6..31EE ; Unknown + "Zyyy", # 31EF..31EF ; Common + "Kana", # 31F0..31FF ; Katakana + "Hang", # 3200..321E ; Hangul + "Zzzz", # 321F..321F ; Unknown + "Zyyy", # 3220..325F ; Common + "Hang", # 3260..327E ; Hangul + "Zyyy", # 327F..32CF ; Common + "Kana", # 32D0..32FE ; Katakana + "Zyyy", # 32FF..32FF ; Common + "Kana", # 3300..3357 ; Katakana + "Zyyy", # 3358..33FF ; Common + "Hani", # 3400..4DBF ; Han + "Zyyy", # 4DC0..4DFF ; 
Common + "Hani", # 4E00..9FFF ; Han + "Yiii", # A000..A48C ; Yi + "Zzzz", # A48D..A48F ; Unknown + "Yiii", # A490..A4C6 ; Yi + "Zzzz", # A4C7..A4CF ; Unknown + "Lisu", # A4D0..A4FF ; Lisu + "Vaii", # A500..A62B ; Vai + "Zzzz", # A62C..A63F ; Unknown + "Cyrl", # A640..A69F ; Cyrillic + "Bamu", # A6A0..A6F7 ; Bamum + "Zzzz", # A6F8..A6FF ; Unknown + "Zyyy", # A700..A721 ; Common + "Latn", # A722..A787 ; Latin + "Zyyy", # A788..A78A ; Common + "Latn", # A78B..A7CD ; Latin + "Zzzz", # A7CE..A7CF ; Unknown + "Latn", # A7D0..A7D1 ; Latin + "Zzzz", # A7D2..A7D2 ; Unknown + "Latn", # A7D3..A7D3 ; Latin + "Zzzz", # A7D4..A7D4 ; Unknown + "Latn", # A7D5..A7DC ; Latin + "Zzzz", # A7DD..A7F1 ; Unknown + "Latn", # A7F2..A7FF ; Latin + "Sylo", # A800..A82C ; Syloti_Nagri + "Zzzz", # A82D..A82F ; Unknown + "Zyyy", # A830..A839 ; Common + "Zzzz", # A83A..A83F ; Unknown + "Phag", # A840..A877 ; Phags_Pa + "Zzzz", # A878..A87F ; Unknown + "Saur", # A880..A8C5 ; Saurashtra + "Zzzz", # A8C6..A8CD ; Unknown + "Saur", # A8CE..A8D9 ; Saurashtra + "Zzzz", # A8DA..A8DF ; Unknown + "Deva", # A8E0..A8FF ; Devanagari + "Kali", # A900..A92D ; Kayah_Li + "Zyyy", # A92E..A92E ; Common + "Kali", # A92F..A92F ; Kayah_Li + "Rjng", # A930..A953 ; Rejang + "Zzzz", # A954..A95E ; Unknown + "Rjng", # A95F..A95F ; Rejang + "Hang", # A960..A97C ; Hangul + "Zzzz", # A97D..A97F ; Unknown + "Java", # A980..A9CD ; Javanese + "Zzzz", # A9CE..A9CE ; Unknown + "Zyyy", # A9CF..A9CF ; Common + "Java", # A9D0..A9D9 ; Javanese + "Zzzz", # A9DA..A9DD ; Unknown + "Java", # A9DE..A9DF ; Javanese + "Mymr", # A9E0..A9FE ; Myanmar + "Zzzz", # A9FF..A9FF ; Unknown + "Cham", # AA00..AA36 ; Cham + "Zzzz", # AA37..AA3F ; Unknown + "Cham", # AA40..AA4D ; Cham + "Zzzz", # AA4E..AA4F ; Unknown + "Cham", # AA50..AA59 ; Cham + "Zzzz", # AA5A..AA5B ; Unknown + "Cham", # AA5C..AA5F ; Cham + "Mymr", # AA60..AA7F ; Myanmar + "Tavt", # AA80..AAC2 ; Tai_Viet + "Zzzz", # AAC3..AADA ; Unknown + "Tavt", # AADB..AADF ; Tai_Viet + "Mtei", # 
AAE0..AAF6 ; Meetei_Mayek + "Zzzz", # AAF7..AB00 ; Unknown + "Ethi", # AB01..AB06 ; Ethiopic + "Zzzz", # AB07..AB08 ; Unknown + "Ethi", # AB09..AB0E ; Ethiopic + "Zzzz", # AB0F..AB10 ; Unknown + "Ethi", # AB11..AB16 ; Ethiopic + "Zzzz", # AB17..AB1F ; Unknown + "Ethi", # AB20..AB26 ; Ethiopic + "Zzzz", # AB27..AB27 ; Unknown + "Ethi", # AB28..AB2E ; Ethiopic + "Zzzz", # AB2F..AB2F ; Unknown + "Latn", # AB30..AB5A ; Latin + "Zyyy", # AB5B..AB5B ; Common + "Latn", # AB5C..AB64 ; Latin + "Grek", # AB65..AB65 ; Greek + "Latn", # AB66..AB69 ; Latin + "Zyyy", # AB6A..AB6B ; Common + "Zzzz", # AB6C..AB6F ; Unknown + "Cher", # AB70..ABBF ; Cherokee + "Mtei", # ABC0..ABED ; Meetei_Mayek + "Zzzz", # ABEE..ABEF ; Unknown + "Mtei", # ABF0..ABF9 ; Meetei_Mayek + "Zzzz", # ABFA..ABFF ; Unknown + "Hang", # AC00..D7A3 ; Hangul + "Zzzz", # D7A4..D7AF ; Unknown + "Hang", # D7B0..D7C6 ; Hangul + "Zzzz", # D7C7..D7CA ; Unknown + "Hang", # D7CB..D7FB ; Hangul + "Zzzz", # D7FC..F8FF ; Unknown + "Hani", # F900..FA6D ; Han + "Zzzz", # FA6E..FA6F ; Unknown + "Hani", # FA70..FAD9 ; Han + "Zzzz", # FADA..FAFF ; Unknown + "Latn", # FB00..FB06 ; Latin + "Zzzz", # FB07..FB12 ; Unknown + "Armn", # FB13..FB17 ; Armenian + "Zzzz", # FB18..FB1C ; Unknown + "Hebr", # FB1D..FB36 ; Hebrew + "Zzzz", # FB37..FB37 ; Unknown + "Hebr", # FB38..FB3C ; Hebrew + "Zzzz", # FB3D..FB3D ; Unknown + "Hebr", # FB3E..FB3E ; Hebrew + "Zzzz", # FB3F..FB3F ; Unknown + "Hebr", # FB40..FB41 ; Hebrew + "Zzzz", # FB42..FB42 ; Unknown + "Hebr", # FB43..FB44 ; Hebrew + "Zzzz", # FB45..FB45 ; Unknown + "Hebr", # FB46..FB4F ; Hebrew + "Arab", # FB50..FBC2 ; Arabic + "Zzzz", # FBC3..FBD2 ; Unknown + "Arab", # FBD3..FD3D ; Arabic + "Zyyy", # FD3E..FD3F ; Common + "Arab", # FD40..FD8F ; Arabic + "Zzzz", # FD90..FD91 ; Unknown + "Arab", # FD92..FDC7 ; Arabic + "Zzzz", # FDC8..FDCE ; Unknown + "Arab", # FDCF..FDCF ; Arabic + "Zzzz", # FDD0..FDEF ; Unknown + "Arab", # FDF0..FDFF ; Arabic + "Zinh", # FE00..FE0F ; Inherited + "Zyyy", 
# FE10..FE19 ; Common + "Zzzz", # FE1A..FE1F ; Unknown + "Zinh", # FE20..FE2D ; Inherited + "Cyrl", # FE2E..FE2F ; Cyrillic + "Zyyy", # FE30..FE52 ; Common + "Zzzz", # FE53..FE53 ; Unknown + "Zyyy", # FE54..FE66 ; Common + "Zzzz", # FE67..FE67 ; Unknown + "Zyyy", # FE68..FE6B ; Common + "Zzzz", # FE6C..FE6F ; Unknown + "Arab", # FE70..FE74 ; Arabic + "Zzzz", # FE75..FE75 ; Unknown + "Arab", # FE76..FEFC ; Arabic + "Zzzz", # FEFD..FEFE ; Unknown + "Zyyy", # FEFF..FEFF ; Common + "Zzzz", # FF00..FF00 ; Unknown + "Zyyy", # FF01..FF20 ; Common + "Latn", # FF21..FF3A ; Latin + "Zyyy", # FF3B..FF40 ; Common + "Latn", # FF41..FF5A ; Latin + "Zyyy", # FF5B..FF65 ; Common + "Kana", # FF66..FF6F ; Katakana + "Zyyy", # FF70..FF70 ; Common + "Kana", # FF71..FF9D ; Katakana + "Zyyy", # FF9E..FF9F ; Common + "Hang", # FFA0..FFBE ; Hangul + "Zzzz", # FFBF..FFC1 ; Unknown + "Hang", # FFC2..FFC7 ; Hangul + "Zzzz", # FFC8..FFC9 ; Unknown + "Hang", # FFCA..FFCF ; Hangul + "Zzzz", # FFD0..FFD1 ; Unknown + "Hang", # FFD2..FFD7 ; Hangul + "Zzzz", # FFD8..FFD9 ; Unknown + "Hang", # FFDA..FFDC ; Hangul + "Zzzz", # FFDD..FFDF ; Unknown + "Zyyy", # FFE0..FFE6 ; Common + "Zzzz", # FFE7..FFE7 ; Unknown + "Zyyy", # FFE8..FFEE ; Common + "Zzzz", # FFEF..FFF8 ; Unknown + "Zyyy", # FFF9..FFFD ; Common + "Zzzz", # FFFE..FFFF ; Unknown + "Linb", # 10000..1000B ; Linear_B + "Zzzz", # 1000C..1000C ; Unknown + "Linb", # 1000D..10026 ; Linear_B + "Zzzz", # 10027..10027 ; Unknown + "Linb", # 10028..1003A ; Linear_B + "Zzzz", # 1003B..1003B ; Unknown + "Linb", # 1003C..1003D ; Linear_B + "Zzzz", # 1003E..1003E ; Unknown + "Linb", # 1003F..1004D ; Linear_B + "Zzzz", # 1004E..1004F ; Unknown + "Linb", # 10050..1005D ; Linear_B + "Zzzz", # 1005E..1007F ; Unknown + "Linb", # 10080..100FA ; Linear_B + "Zzzz", # 100FB..100FF ; Unknown + "Zyyy", # 10100..10102 ; Common + "Zzzz", # 10103..10106 ; Unknown + "Zyyy", # 10107..10133 ; Common + "Zzzz", # 10134..10136 ; Unknown + "Zyyy", # 10137..1013F ; Common + 
"Grek", # 10140..1018E ; Greek + "Zzzz", # 1018F..1018F ; Unknown + "Zyyy", # 10190..1019C ; Common + "Zzzz", # 1019D..1019F ; Unknown + "Grek", # 101A0..101A0 ; Greek + "Zzzz", # 101A1..101CF ; Unknown + "Zyyy", # 101D0..101FC ; Common + "Zinh", # 101FD..101FD ; Inherited + "Zzzz", # 101FE..1027F ; Unknown + "Lyci", # 10280..1029C ; Lycian + "Zzzz", # 1029D..1029F ; Unknown + "Cari", # 102A0..102D0 ; Carian + "Zzzz", # 102D1..102DF ; Unknown + "Zinh", # 102E0..102E0 ; Inherited + "Zyyy", # 102E1..102FB ; Common + "Zzzz", # 102FC..102FF ; Unknown + "Ital", # 10300..10323 ; Old_Italic + "Zzzz", # 10324..1032C ; Unknown + "Ital", # 1032D..1032F ; Old_Italic + "Goth", # 10330..1034A ; Gothic + "Zzzz", # 1034B..1034F ; Unknown + "Perm", # 10350..1037A ; Old_Permic + "Zzzz", # 1037B..1037F ; Unknown + "Ugar", # 10380..1039D ; Ugaritic + "Zzzz", # 1039E..1039E ; Unknown + "Ugar", # 1039F..1039F ; Ugaritic + "Xpeo", # 103A0..103C3 ; Old_Persian + "Zzzz", # 103C4..103C7 ; Unknown + "Xpeo", # 103C8..103D5 ; Old_Persian + "Zzzz", # 103D6..103FF ; Unknown + "Dsrt", # 10400..1044F ; Deseret + "Shaw", # 10450..1047F ; Shavian + "Osma", # 10480..1049D ; Osmanya + "Zzzz", # 1049E..1049F ; Unknown + "Osma", # 104A0..104A9 ; Osmanya + "Zzzz", # 104AA..104AF ; Unknown + "Osge", # 104B0..104D3 ; Osage + "Zzzz", # 104D4..104D7 ; Unknown + "Osge", # 104D8..104FB ; Osage + "Zzzz", # 104FC..104FF ; Unknown + "Elba", # 10500..10527 ; Elbasan + "Zzzz", # 10528..1052F ; Unknown + "Aghb", # 10530..10563 ; Caucasian_Albanian + "Zzzz", # 10564..1056E ; Unknown + "Aghb", # 1056F..1056F ; Caucasian_Albanian + "Vith", # 10570..1057A ; Vithkuqi + "Zzzz", # 1057B..1057B ; Unknown + "Vith", # 1057C..1058A ; Vithkuqi + "Zzzz", # 1058B..1058B ; Unknown + "Vith", # 1058C..10592 ; Vithkuqi + "Zzzz", # 10593..10593 ; Unknown + "Vith", # 10594..10595 ; Vithkuqi + "Zzzz", # 10596..10596 ; Unknown + "Vith", # 10597..105A1 ; Vithkuqi + "Zzzz", # 105A2..105A2 ; Unknown + "Vith", # 105A3..105B1 ; Vithkuqi + 
"Zzzz", # 105B2..105B2 ; Unknown + "Vith", # 105B3..105B9 ; Vithkuqi + "Zzzz", # 105BA..105BA ; Unknown + "Vith", # 105BB..105BC ; Vithkuqi + "Zzzz", # 105BD..105BF ; Unknown + "Todr", # 105C0..105F3 ; Todhri + "Zzzz", # 105F4..105FF ; Unknown + "Lina", # 10600..10736 ; Linear_A + "Zzzz", # 10737..1073F ; Unknown + "Lina", # 10740..10755 ; Linear_A + "Zzzz", # 10756..1075F ; Unknown + "Lina", # 10760..10767 ; Linear_A + "Zzzz", # 10768..1077F ; Unknown + "Latn", # 10780..10785 ; Latin + "Zzzz", # 10786..10786 ; Unknown + "Latn", # 10787..107B0 ; Latin + "Zzzz", # 107B1..107B1 ; Unknown + "Latn", # 107B2..107BA ; Latin + "Zzzz", # 107BB..107FF ; Unknown + "Cprt", # 10800..10805 ; Cypriot + "Zzzz", # 10806..10807 ; Unknown + "Cprt", # 10808..10808 ; Cypriot + "Zzzz", # 10809..10809 ; Unknown + "Cprt", # 1080A..10835 ; Cypriot + "Zzzz", # 10836..10836 ; Unknown + "Cprt", # 10837..10838 ; Cypriot + "Zzzz", # 10839..1083B ; Unknown + "Cprt", # 1083C..1083C ; Cypriot + "Zzzz", # 1083D..1083E ; Unknown + "Cprt", # 1083F..1083F ; Cypriot + "Armi", # 10840..10855 ; Imperial_Aramaic + "Zzzz", # 10856..10856 ; Unknown + "Armi", # 10857..1085F ; Imperial_Aramaic + "Palm", # 10860..1087F ; Palmyrene + "Nbat", # 10880..1089E ; Nabataean + "Zzzz", # 1089F..108A6 ; Unknown + "Nbat", # 108A7..108AF ; Nabataean + "Zzzz", # 108B0..108DF ; Unknown + "Hatr", # 108E0..108F2 ; Hatran + "Zzzz", # 108F3..108F3 ; Unknown + "Hatr", # 108F4..108F5 ; Hatran + "Zzzz", # 108F6..108FA ; Unknown + "Hatr", # 108FB..108FF ; Hatran + "Phnx", # 10900..1091B ; Phoenician + "Zzzz", # 1091C..1091E ; Unknown + "Phnx", # 1091F..1091F ; Phoenician + "Lydi", # 10920..10939 ; Lydian + "Zzzz", # 1093A..1093E ; Unknown + "Lydi", # 1093F..1093F ; Lydian + "Zzzz", # 10940..1097F ; Unknown + "Mero", # 10980..1099F ; Meroitic_Hieroglyphs + "Merc", # 109A0..109B7 ; Meroitic_Cursive + "Zzzz", # 109B8..109BB ; Unknown + "Merc", # 109BC..109CF ; Meroitic_Cursive + "Zzzz", # 109D0..109D1 ; Unknown + "Merc", # 
109D2..109FF ; Meroitic_Cursive + "Khar", # 10A00..10A03 ; Kharoshthi + "Zzzz", # 10A04..10A04 ; Unknown + "Khar", # 10A05..10A06 ; Kharoshthi + "Zzzz", # 10A07..10A0B ; Unknown + "Khar", # 10A0C..10A13 ; Kharoshthi + "Zzzz", # 10A14..10A14 ; Unknown + "Khar", # 10A15..10A17 ; Kharoshthi + "Zzzz", # 10A18..10A18 ; Unknown + "Khar", # 10A19..10A35 ; Kharoshthi + "Zzzz", # 10A36..10A37 ; Unknown + "Khar", # 10A38..10A3A ; Kharoshthi + "Zzzz", # 10A3B..10A3E ; Unknown + "Khar", # 10A3F..10A48 ; Kharoshthi + "Zzzz", # 10A49..10A4F ; Unknown + "Khar", # 10A50..10A58 ; Kharoshthi + "Zzzz", # 10A59..10A5F ; Unknown + "Sarb", # 10A60..10A7F ; Old_South_Arabian + "Narb", # 10A80..10A9F ; Old_North_Arabian + "Zzzz", # 10AA0..10ABF ; Unknown + "Mani", # 10AC0..10AE6 ; Manichaean + "Zzzz", # 10AE7..10AEA ; Unknown + "Mani", # 10AEB..10AF6 ; Manichaean + "Zzzz", # 10AF7..10AFF ; Unknown + "Avst", # 10B00..10B35 ; Avestan + "Zzzz", # 10B36..10B38 ; Unknown + "Avst", # 10B39..10B3F ; Avestan + "Prti", # 10B40..10B55 ; Inscriptional_Parthian + "Zzzz", # 10B56..10B57 ; Unknown + "Prti", # 10B58..10B5F ; Inscriptional_Parthian + "Phli", # 10B60..10B72 ; Inscriptional_Pahlavi + "Zzzz", # 10B73..10B77 ; Unknown + "Phli", # 10B78..10B7F ; Inscriptional_Pahlavi + "Phlp", # 10B80..10B91 ; Psalter_Pahlavi + "Zzzz", # 10B92..10B98 ; Unknown + "Phlp", # 10B99..10B9C ; Psalter_Pahlavi + "Zzzz", # 10B9D..10BA8 ; Unknown + "Phlp", # 10BA9..10BAF ; Psalter_Pahlavi + "Zzzz", # 10BB0..10BFF ; Unknown + "Orkh", # 10C00..10C48 ; Old_Turkic + "Zzzz", # 10C49..10C7F ; Unknown + "Hung", # 10C80..10CB2 ; Old_Hungarian + "Zzzz", # 10CB3..10CBF ; Unknown + "Hung", # 10CC0..10CF2 ; Old_Hungarian + "Zzzz", # 10CF3..10CF9 ; Unknown + "Hung", # 10CFA..10CFF ; Old_Hungarian + "Rohg", # 10D00..10D27 ; Hanifi_Rohingya + "Zzzz", # 10D28..10D2F ; Unknown + "Rohg", # 10D30..10D39 ; Hanifi_Rohingya + "Zzzz", # 10D3A..10D3F ; Unknown + "Gara", # 10D40..10D65 ; Garay + "Zzzz", # 10D66..10D68 ; Unknown + "Gara", # 
10D69..10D85 ; Garay + "Zzzz", # 10D86..10D8D ; Unknown + "Gara", # 10D8E..10D8F ; Garay + "Zzzz", # 10D90..10E5F ; Unknown + "Arab", # 10E60..10E7E ; Arabic + "Zzzz", # 10E7F..10E7F ; Unknown + "Yezi", # 10E80..10EA9 ; Yezidi + "Zzzz", # 10EAA..10EAA ; Unknown + "Yezi", # 10EAB..10EAD ; Yezidi + "Zzzz", # 10EAE..10EAF ; Unknown + "Yezi", # 10EB0..10EB1 ; Yezidi + "Zzzz", # 10EB2..10EC1 ; Unknown + "Arab", # 10EC2..10EC4 ; Arabic + "Zzzz", # 10EC5..10EFB ; Unknown + "Arab", # 10EFC..10EFF ; Arabic + "Sogo", # 10F00..10F27 ; Old_Sogdian + "Zzzz", # 10F28..10F2F ; Unknown + "Sogd", # 10F30..10F59 ; Sogdian + "Zzzz", # 10F5A..10F6F ; Unknown + "Ougr", # 10F70..10F89 ; Old_Uyghur + "Zzzz", # 10F8A..10FAF ; Unknown + "Chrs", # 10FB0..10FCB ; Chorasmian + "Zzzz", # 10FCC..10FDF ; Unknown + "Elym", # 10FE0..10FF6 ; Elymaic + "Zzzz", # 10FF7..10FFF ; Unknown + "Brah", # 11000..1104D ; Brahmi + "Zzzz", # 1104E..11051 ; Unknown + "Brah", # 11052..11075 ; Brahmi + "Zzzz", # 11076..1107E ; Unknown + "Brah", # 1107F..1107F ; Brahmi + "Kthi", # 11080..110C2 ; Kaithi + "Zzzz", # 110C3..110CC ; Unknown + "Kthi", # 110CD..110CD ; Kaithi + "Zzzz", # 110CE..110CF ; Unknown + "Sora", # 110D0..110E8 ; Sora_Sompeng + "Zzzz", # 110E9..110EF ; Unknown + "Sora", # 110F0..110F9 ; Sora_Sompeng + "Zzzz", # 110FA..110FF ; Unknown + "Cakm", # 11100..11134 ; Chakma + "Zzzz", # 11135..11135 ; Unknown + "Cakm", # 11136..11147 ; Chakma + "Zzzz", # 11148..1114F ; Unknown + "Mahj", # 11150..11176 ; Mahajani + "Zzzz", # 11177..1117F ; Unknown + "Shrd", # 11180..111DF ; Sharada + "Zzzz", # 111E0..111E0 ; Unknown + "Sinh", # 111E1..111F4 ; Sinhala + "Zzzz", # 111F5..111FF ; Unknown + "Khoj", # 11200..11211 ; Khojki + "Zzzz", # 11212..11212 ; Unknown + "Khoj", # 11213..11241 ; Khojki + "Zzzz", # 11242..1127F ; Unknown + "Mult", # 11280..11286 ; Multani + "Zzzz", # 11287..11287 ; Unknown + "Mult", # 11288..11288 ; Multani + "Zzzz", # 11289..11289 ; Unknown + "Mult", # 1128A..1128D ; Multani + "Zzzz", # 
1128E..1128E ; Unknown + "Mult", # 1128F..1129D ; Multani + "Zzzz", # 1129E..1129E ; Unknown + "Mult", # 1129F..112A9 ; Multani + "Zzzz", # 112AA..112AF ; Unknown + "Sind", # 112B0..112EA ; Khudawadi + "Zzzz", # 112EB..112EF ; Unknown + "Sind", # 112F0..112F9 ; Khudawadi + "Zzzz", # 112FA..112FF ; Unknown + "Gran", # 11300..11303 ; Grantha + "Zzzz", # 11304..11304 ; Unknown + "Gran", # 11305..1130C ; Grantha + "Zzzz", # 1130D..1130E ; Unknown + "Gran", # 1130F..11310 ; Grantha + "Zzzz", # 11311..11312 ; Unknown + "Gran", # 11313..11328 ; Grantha + "Zzzz", # 11329..11329 ; Unknown + "Gran", # 1132A..11330 ; Grantha + "Zzzz", # 11331..11331 ; Unknown + "Gran", # 11332..11333 ; Grantha + "Zzzz", # 11334..11334 ; Unknown + "Gran", # 11335..11339 ; Grantha + "Zzzz", # 1133A..1133A ; Unknown + "Zinh", # 1133B..1133B ; Inherited + "Gran", # 1133C..11344 ; Grantha + "Zzzz", # 11345..11346 ; Unknown + "Gran", # 11347..11348 ; Grantha + "Zzzz", # 11349..1134A ; Unknown + "Gran", # 1134B..1134D ; Grantha + "Zzzz", # 1134E..1134F ; Unknown + "Gran", # 11350..11350 ; Grantha + "Zzzz", # 11351..11356 ; Unknown + "Gran", # 11357..11357 ; Grantha + "Zzzz", # 11358..1135C ; Unknown + "Gran", # 1135D..11363 ; Grantha + "Zzzz", # 11364..11365 ; Unknown + "Gran", # 11366..1136C ; Grantha + "Zzzz", # 1136D..1136F ; Unknown + "Gran", # 11370..11374 ; Grantha + "Zzzz", # 11375..1137F ; Unknown + "Tutg", # 11380..11389 ; Tulu_Tigalari + "Zzzz", # 1138A..1138A ; Unknown + "Tutg", # 1138B..1138B ; Tulu_Tigalari + "Zzzz", # 1138C..1138D ; Unknown + "Tutg", # 1138E..1138E ; Tulu_Tigalari + "Zzzz", # 1138F..1138F ; Unknown + "Tutg", # 11390..113B5 ; Tulu_Tigalari + "Zzzz", # 113B6..113B6 ; Unknown + "Tutg", # 113B7..113C0 ; Tulu_Tigalari + "Zzzz", # 113C1..113C1 ; Unknown + "Tutg", # 113C2..113C2 ; Tulu_Tigalari + "Zzzz", # 113C3..113C4 ; Unknown + "Tutg", # 113C5..113C5 ; Tulu_Tigalari + "Zzzz", # 113C6..113C6 ; Unknown + "Tutg", # 113C7..113CA ; Tulu_Tigalari + "Zzzz", # 113CB..113CB ; 
Unknown + "Tutg", # 113CC..113D5 ; Tulu_Tigalari + "Zzzz", # 113D6..113D6 ; Unknown + "Tutg", # 113D7..113D8 ; Tulu_Tigalari + "Zzzz", # 113D9..113E0 ; Unknown + "Tutg", # 113E1..113E2 ; Tulu_Tigalari + "Zzzz", # 113E3..113FF ; Unknown + "Newa", # 11400..1145B ; Newa + "Zzzz", # 1145C..1145C ; Unknown + "Newa", # 1145D..11461 ; Newa + "Zzzz", # 11462..1147F ; Unknown + "Tirh", # 11480..114C7 ; Tirhuta + "Zzzz", # 114C8..114CF ; Unknown + "Tirh", # 114D0..114D9 ; Tirhuta + "Zzzz", # 114DA..1157F ; Unknown + "Sidd", # 11580..115B5 ; Siddham + "Zzzz", # 115B6..115B7 ; Unknown + "Sidd", # 115B8..115DD ; Siddham + "Zzzz", # 115DE..115FF ; Unknown + "Modi", # 11600..11644 ; Modi + "Zzzz", # 11645..1164F ; Unknown + "Modi", # 11650..11659 ; Modi + "Zzzz", # 1165A..1165F ; Unknown + "Mong", # 11660..1166C ; Mongolian + "Zzzz", # 1166D..1167F ; Unknown + "Takr", # 11680..116B9 ; Takri + "Zzzz", # 116BA..116BF ; Unknown + "Takr", # 116C0..116C9 ; Takri + "Zzzz", # 116CA..116CF ; Unknown + "Mymr", # 116D0..116E3 ; Myanmar + "Zzzz", # 116E4..116FF ; Unknown + "Ahom", # 11700..1171A ; Ahom + "Zzzz", # 1171B..1171C ; Unknown + "Ahom", # 1171D..1172B ; Ahom + "Zzzz", # 1172C..1172F ; Unknown + "Ahom", # 11730..11746 ; Ahom + "Zzzz", # 11747..117FF ; Unknown + "Dogr", # 11800..1183B ; Dogra + "Zzzz", # 1183C..1189F ; Unknown + "Wara", # 118A0..118F2 ; Warang_Citi + "Zzzz", # 118F3..118FE ; Unknown + "Wara", # 118FF..118FF ; Warang_Citi + "Diak", # 11900..11906 ; Dives_Akuru + "Zzzz", # 11907..11908 ; Unknown + "Diak", # 11909..11909 ; Dives_Akuru + "Zzzz", # 1190A..1190B ; Unknown + "Diak", # 1190C..11913 ; Dives_Akuru + "Zzzz", # 11914..11914 ; Unknown + "Diak", # 11915..11916 ; Dives_Akuru + "Zzzz", # 11917..11917 ; Unknown + "Diak", # 11918..11935 ; Dives_Akuru + "Zzzz", # 11936..11936 ; Unknown + "Diak", # 11937..11938 ; Dives_Akuru + "Zzzz", # 11939..1193A ; Unknown + "Diak", # 1193B..11946 ; Dives_Akuru + "Zzzz", # 11947..1194F ; Unknown + "Diak", # 11950..11959 ; 
Dives_Akuru + "Zzzz", # 1195A..1199F ; Unknown + "Nand", # 119A0..119A7 ; Nandinagari + "Zzzz", # 119A8..119A9 ; Unknown + "Nand", # 119AA..119D7 ; Nandinagari + "Zzzz", # 119D8..119D9 ; Unknown + "Nand", # 119DA..119E4 ; Nandinagari + "Zzzz", # 119E5..119FF ; Unknown + "Zanb", # 11A00..11A47 ; Zanabazar_Square + "Zzzz", # 11A48..11A4F ; Unknown + "Soyo", # 11A50..11AA2 ; Soyombo + "Zzzz", # 11AA3..11AAF ; Unknown + "Cans", # 11AB0..11ABF ; Canadian_Aboriginal + "Pauc", # 11AC0..11AF8 ; Pau_Cin_Hau + "Zzzz", # 11AF9..11AFF ; Unknown + "Deva", # 11B00..11B09 ; Devanagari + "Zzzz", # 11B0A..11BBF ; Unknown + "Sunu", # 11BC0..11BE1 ; Sunuwar + "Zzzz", # 11BE2..11BEF ; Unknown + "Sunu", # 11BF0..11BF9 ; Sunuwar + "Zzzz", # 11BFA..11BFF ; Unknown + "Bhks", # 11C00..11C08 ; Bhaiksuki + "Zzzz", # 11C09..11C09 ; Unknown + "Bhks", # 11C0A..11C36 ; Bhaiksuki + "Zzzz", # 11C37..11C37 ; Unknown + "Bhks", # 11C38..11C45 ; Bhaiksuki + "Zzzz", # 11C46..11C4F ; Unknown + "Bhks", # 11C50..11C6C ; Bhaiksuki + "Zzzz", # 11C6D..11C6F ; Unknown + "Marc", # 11C70..11C8F ; Marchen + "Zzzz", # 11C90..11C91 ; Unknown + "Marc", # 11C92..11CA7 ; Marchen + "Zzzz", # 11CA8..11CA8 ; Unknown + "Marc", # 11CA9..11CB6 ; Marchen + "Zzzz", # 11CB7..11CFF ; Unknown + "Gonm", # 11D00..11D06 ; Masaram_Gondi + "Zzzz", # 11D07..11D07 ; Unknown + "Gonm", # 11D08..11D09 ; Masaram_Gondi + "Zzzz", # 11D0A..11D0A ; Unknown + "Gonm", # 11D0B..11D36 ; Masaram_Gondi + "Zzzz", # 11D37..11D39 ; Unknown + "Gonm", # 11D3A..11D3A ; Masaram_Gondi + "Zzzz", # 11D3B..11D3B ; Unknown + "Gonm", # 11D3C..11D3D ; Masaram_Gondi + "Zzzz", # 11D3E..11D3E ; Unknown + "Gonm", # 11D3F..11D47 ; Masaram_Gondi + "Zzzz", # 11D48..11D4F ; Unknown + "Gonm", # 11D50..11D59 ; Masaram_Gondi + "Zzzz", # 11D5A..11D5F ; Unknown + "Gong", # 11D60..11D65 ; Gunjala_Gondi + "Zzzz", # 11D66..11D66 ; Unknown + "Gong", # 11D67..11D68 ; Gunjala_Gondi + "Zzzz", # 11D69..11D69 ; Unknown + "Gong", # 11D6A..11D8E ; Gunjala_Gondi + "Zzzz", # 11D8F..11D8F 
; Unknown + "Gong", # 11D90..11D91 ; Gunjala_Gondi + "Zzzz", # 11D92..11D92 ; Unknown + "Gong", # 11D93..11D98 ; Gunjala_Gondi + "Zzzz", # 11D99..11D9F ; Unknown + "Gong", # 11DA0..11DA9 ; Gunjala_Gondi + "Zzzz", # 11DAA..11EDF ; Unknown + "Maka", # 11EE0..11EF8 ; Makasar + "Zzzz", # 11EF9..11EFF ; Unknown + "Kawi", # 11F00..11F10 ; Kawi + "Zzzz", # 11F11..11F11 ; Unknown + "Kawi", # 11F12..11F3A ; Kawi + "Zzzz", # 11F3B..11F3D ; Unknown + "Kawi", # 11F3E..11F5A ; Kawi + "Zzzz", # 11F5B..11FAF ; Unknown + "Lisu", # 11FB0..11FB0 ; Lisu + "Zzzz", # 11FB1..11FBF ; Unknown + "Taml", # 11FC0..11FF1 ; Tamil + "Zzzz", # 11FF2..11FFE ; Unknown + "Taml", # 11FFF..11FFF ; Tamil + "Xsux", # 12000..12399 ; Cuneiform + "Zzzz", # 1239A..123FF ; Unknown + "Xsux", # 12400..1246E ; Cuneiform + "Zzzz", # 1246F..1246F ; Unknown + "Xsux", # 12470..12474 ; Cuneiform + "Zzzz", # 12475..1247F ; Unknown + "Xsux", # 12480..12543 ; Cuneiform + "Zzzz", # 12544..12F8F ; Unknown + "Cpmn", # 12F90..12FF2 ; Cypro_Minoan + "Zzzz", # 12FF3..12FFF ; Unknown + "Egyp", # 13000..13455 ; Egyptian_Hieroglyphs + "Zzzz", # 13456..1345F ; Unknown + "Egyp", # 13460..143FA ; Egyptian_Hieroglyphs + "Zzzz", # 143FB..143FF ; Unknown + "Hluw", # 14400..14646 ; Anatolian_Hieroglyphs + "Zzzz", # 14647..160FF ; Unknown + "Gukh", # 16100..16139 ; Gurung_Khema + "Zzzz", # 1613A..167FF ; Unknown + "Bamu", # 16800..16A38 ; Bamum + "Zzzz", # 16A39..16A3F ; Unknown + "Mroo", # 16A40..16A5E ; Mro + "Zzzz", # 16A5F..16A5F ; Unknown + "Mroo", # 16A60..16A69 ; Mro + "Zzzz", # 16A6A..16A6D ; Unknown + "Mroo", # 16A6E..16A6F ; Mro + "Tnsa", # 16A70..16ABE ; Tangsa + "Zzzz", # 16ABF..16ABF ; Unknown + "Tnsa", # 16AC0..16AC9 ; Tangsa + "Zzzz", # 16ACA..16ACF ; Unknown + "Bass", # 16AD0..16AED ; Bassa_Vah + "Zzzz", # 16AEE..16AEF ; Unknown + "Bass", # 16AF0..16AF5 ; Bassa_Vah + "Zzzz", # 16AF6..16AFF ; Unknown + "Hmng", # 16B00..16B45 ; Pahawh_Hmong + "Zzzz", # 16B46..16B4F ; Unknown + "Hmng", # 16B50..16B59 ; Pahawh_Hmong + 
"Zzzz", # 16B5A..16B5A ; Unknown + "Hmng", # 16B5B..16B61 ; Pahawh_Hmong + "Zzzz", # 16B62..16B62 ; Unknown + "Hmng", # 16B63..16B77 ; Pahawh_Hmong + "Zzzz", # 16B78..16B7C ; Unknown + "Hmng", # 16B7D..16B8F ; Pahawh_Hmong + "Zzzz", # 16B90..16D3F ; Unknown + "Krai", # 16D40..16D79 ; Kirat_Rai + "Zzzz", # 16D7A..16E3F ; Unknown + "Medf", # 16E40..16E9A ; Medefaidrin + "Zzzz", # 16E9B..16EFF ; Unknown + "Plrd", # 16F00..16F4A ; Miao + "Zzzz", # 16F4B..16F4E ; Unknown + "Plrd", # 16F4F..16F87 ; Miao + "Zzzz", # 16F88..16F8E ; Unknown + "Plrd", # 16F8F..16F9F ; Miao + "Zzzz", # 16FA0..16FDF ; Unknown + "Tang", # 16FE0..16FE0 ; Tangut + "Nshu", # 16FE1..16FE1 ; Nushu + "Hani", # 16FE2..16FE3 ; Han + "Kits", # 16FE4..16FE4 ; Khitan_Small_Script + "Zzzz", # 16FE5..16FEF ; Unknown + "Hani", # 16FF0..16FF1 ; Han + "Zzzz", # 16FF2..16FFF ; Unknown + "Tang", # 17000..187F7 ; Tangut + "Zzzz", # 187F8..187FF ; Unknown + "Tang", # 18800..18AFF ; Tangut + "Kits", # 18B00..18CD5 ; Khitan_Small_Script + "Zzzz", # 18CD6..18CFE ; Unknown + "Kits", # 18CFF..18CFF ; Khitan_Small_Script + "Tang", # 18D00..18D08 ; Tangut + "Zzzz", # 18D09..1AFEF ; Unknown + "Kana", # 1AFF0..1AFF3 ; Katakana + "Zzzz", # 1AFF4..1AFF4 ; Unknown + "Kana", # 1AFF5..1AFFB ; Katakana + "Zzzz", # 1AFFC..1AFFC ; Unknown + "Kana", # 1AFFD..1AFFE ; Katakana + "Zzzz", # 1AFFF..1AFFF ; Unknown + "Kana", # 1B000..1B000 ; Katakana + "Hira", # 1B001..1B11F ; Hiragana + "Kana", # 1B120..1B122 ; Katakana + "Zzzz", # 1B123..1B131 ; Unknown + "Hira", # 1B132..1B132 ; Hiragana + "Zzzz", # 1B133..1B14F ; Unknown + "Hira", # 1B150..1B152 ; Hiragana + "Zzzz", # 1B153..1B154 ; Unknown + "Kana", # 1B155..1B155 ; Katakana + "Zzzz", # 1B156..1B163 ; Unknown + "Kana", # 1B164..1B167 ; Katakana + "Zzzz", # 1B168..1B16F ; Unknown + "Nshu", # 1B170..1B2FB ; Nushu + "Zzzz", # 1B2FC..1BBFF ; Unknown + "Dupl", # 1BC00..1BC6A ; Duployan + "Zzzz", # 1BC6B..1BC6F ; Unknown + "Dupl", # 1BC70..1BC7C ; Duployan + "Zzzz", # 1BC7D..1BC7F ; 
Unknown + "Dupl", # 1BC80..1BC88 ; Duployan + "Zzzz", # 1BC89..1BC8F ; Unknown + "Dupl", # 1BC90..1BC99 ; Duployan + "Zzzz", # 1BC9A..1BC9B ; Unknown + "Dupl", # 1BC9C..1BC9F ; Duployan + "Zyyy", # 1BCA0..1BCA3 ; Common + "Zzzz", # 1BCA4..1CBFF ; Unknown + "Zyyy", # 1CC00..1CCF9 ; Common + "Zzzz", # 1CCFA..1CCFF ; Unknown + "Zyyy", # 1CD00..1CEB3 ; Common + "Zzzz", # 1CEB4..1CEFF ; Unknown + "Zinh", # 1CF00..1CF2D ; Inherited + "Zzzz", # 1CF2E..1CF2F ; Unknown + "Zinh", # 1CF30..1CF46 ; Inherited + "Zzzz", # 1CF47..1CF4F ; Unknown + "Zyyy", # 1CF50..1CFC3 ; Common + "Zzzz", # 1CFC4..1CFFF ; Unknown + "Zyyy", # 1D000..1D0F5 ; Common + "Zzzz", # 1D0F6..1D0FF ; Unknown + "Zyyy", # 1D100..1D126 ; Common + "Zzzz", # 1D127..1D128 ; Unknown + "Zyyy", # 1D129..1D166 ; Common + "Zinh", # 1D167..1D169 ; Inherited + "Zyyy", # 1D16A..1D17A ; Common + "Zinh", # 1D17B..1D182 ; Inherited + "Zyyy", # 1D183..1D184 ; Common + "Zinh", # 1D185..1D18B ; Inherited + "Zyyy", # 1D18C..1D1A9 ; Common + "Zinh", # 1D1AA..1D1AD ; Inherited + "Zyyy", # 1D1AE..1D1EA ; Common + "Zzzz", # 1D1EB..1D1FF ; Unknown + "Grek", # 1D200..1D245 ; Greek + "Zzzz", # 1D246..1D2BF ; Unknown + "Zyyy", # 1D2C0..1D2D3 ; Common + "Zzzz", # 1D2D4..1D2DF ; Unknown + "Zyyy", # 1D2E0..1D2F3 ; Common + "Zzzz", # 1D2F4..1D2FF ; Unknown + "Zyyy", # 1D300..1D356 ; Common + "Zzzz", # 1D357..1D35F ; Unknown + "Zyyy", # 1D360..1D378 ; Common + "Zzzz", # 1D379..1D3FF ; Unknown + "Zyyy", # 1D400..1D454 ; Common + "Zzzz", # 1D455..1D455 ; Unknown + "Zyyy", # 1D456..1D49C ; Common + "Zzzz", # 1D49D..1D49D ; Unknown + "Zyyy", # 1D49E..1D49F ; Common + "Zzzz", # 1D4A0..1D4A1 ; Unknown + "Zyyy", # 1D4A2..1D4A2 ; Common + "Zzzz", # 1D4A3..1D4A4 ; Unknown + "Zyyy", # 1D4A5..1D4A6 ; Common + "Zzzz", # 1D4A7..1D4A8 ; Unknown + "Zyyy", # 1D4A9..1D4AC ; Common + "Zzzz", # 1D4AD..1D4AD ; Unknown + "Zyyy", # 1D4AE..1D4B9 ; Common + "Zzzz", # 1D4BA..1D4BA ; Unknown + "Zyyy", # 1D4BB..1D4BB ; Common + "Zzzz", # 1D4BC..1D4BC ; Unknown + 
"Zyyy", # 1D4BD..1D4C3 ; Common + "Zzzz", # 1D4C4..1D4C4 ; Unknown + "Zyyy", # 1D4C5..1D505 ; Common + "Zzzz", # 1D506..1D506 ; Unknown + "Zyyy", # 1D507..1D50A ; Common + "Zzzz", # 1D50B..1D50C ; Unknown + "Zyyy", # 1D50D..1D514 ; Common + "Zzzz", # 1D515..1D515 ; Unknown + "Zyyy", # 1D516..1D51C ; Common + "Zzzz", # 1D51D..1D51D ; Unknown + "Zyyy", # 1D51E..1D539 ; Common + "Zzzz", # 1D53A..1D53A ; Unknown + "Zyyy", # 1D53B..1D53E ; Common + "Zzzz", # 1D53F..1D53F ; Unknown + "Zyyy", # 1D540..1D544 ; Common + "Zzzz", # 1D545..1D545 ; Unknown + "Zyyy", # 1D546..1D546 ; Common + "Zzzz", # 1D547..1D549 ; Unknown + "Zyyy", # 1D54A..1D550 ; Common + "Zzzz", # 1D551..1D551 ; Unknown + "Zyyy", # 1D552..1D6A5 ; Common + "Zzzz", # 1D6A6..1D6A7 ; Unknown + "Zyyy", # 1D6A8..1D7CB ; Common + "Zzzz", # 1D7CC..1D7CD ; Unknown + "Zyyy", # 1D7CE..1D7FF ; Common + "Sgnw", # 1D800..1DA8B ; SignWriting + "Zzzz", # 1DA8C..1DA9A ; Unknown + "Sgnw", # 1DA9B..1DA9F ; SignWriting + "Zzzz", # 1DAA0..1DAA0 ; Unknown + "Sgnw", # 1DAA1..1DAAF ; SignWriting + "Zzzz", # 1DAB0..1DEFF ; Unknown + "Latn", # 1DF00..1DF1E ; Latin + "Zzzz", # 1DF1F..1DF24 ; Unknown + "Latn", # 1DF25..1DF2A ; Latin + "Zzzz", # 1DF2B..1DFFF ; Unknown + "Glag", # 1E000..1E006 ; Glagolitic + "Zzzz", # 1E007..1E007 ; Unknown + "Glag", # 1E008..1E018 ; Glagolitic + "Zzzz", # 1E019..1E01A ; Unknown + "Glag", # 1E01B..1E021 ; Glagolitic + "Zzzz", # 1E022..1E022 ; Unknown + "Glag", # 1E023..1E024 ; Glagolitic + "Zzzz", # 1E025..1E025 ; Unknown + "Glag", # 1E026..1E02A ; Glagolitic + "Zzzz", # 1E02B..1E02F ; Unknown + "Cyrl", # 1E030..1E06D ; Cyrillic + "Zzzz", # 1E06E..1E08E ; Unknown + "Cyrl", # 1E08F..1E08F ; Cyrillic + "Zzzz", # 1E090..1E0FF ; Unknown + "Hmnp", # 1E100..1E12C ; Nyiakeng_Puachue_Hmong + "Zzzz", # 1E12D..1E12F ; Unknown + "Hmnp", # 1E130..1E13D ; Nyiakeng_Puachue_Hmong + "Zzzz", # 1E13E..1E13F ; Unknown + "Hmnp", # 1E140..1E149 ; Nyiakeng_Puachue_Hmong + "Zzzz", # 1E14A..1E14D ; Unknown + "Hmnp", # 
1E14E..1E14F ; Nyiakeng_Puachue_Hmong + "Zzzz", # 1E150..1E28F ; Unknown + "Toto", # 1E290..1E2AE ; Toto + "Zzzz", # 1E2AF..1E2BF ; Unknown + "Wcho", # 1E2C0..1E2F9 ; Wancho + "Zzzz", # 1E2FA..1E2FE ; Unknown + "Wcho", # 1E2FF..1E2FF ; Wancho + "Zzzz", # 1E300..1E4CF ; Unknown + "Nagm", # 1E4D0..1E4F9 ; Nag_Mundari + "Zzzz", # 1E4FA..1E5CF ; Unknown + "Onao", # 1E5D0..1E5FA ; Ol_Onal + "Zzzz", # 1E5FB..1E5FE ; Unknown + "Onao", # 1E5FF..1E5FF ; Ol_Onal + "Zzzz", # 1E600..1E7DF ; Unknown + "Ethi", # 1E7E0..1E7E6 ; Ethiopic + "Zzzz", # 1E7E7..1E7E7 ; Unknown + "Ethi", # 1E7E8..1E7EB ; Ethiopic + "Zzzz", # 1E7EC..1E7EC ; Unknown + "Ethi", # 1E7ED..1E7EE ; Ethiopic + "Zzzz", # 1E7EF..1E7EF ; Unknown + "Ethi", # 1E7F0..1E7FE ; Ethiopic + "Zzzz", # 1E7FF..1E7FF ; Unknown + "Mend", # 1E800..1E8C4 ; Mende_Kikakui + "Zzzz", # 1E8C5..1E8C6 ; Unknown + "Mend", # 1E8C7..1E8D6 ; Mende_Kikakui + "Zzzz", # 1E8D7..1E8FF ; Unknown + "Adlm", # 1E900..1E94B ; Adlam + "Zzzz", # 1E94C..1E94F ; Unknown + "Adlm", # 1E950..1E959 ; Adlam + "Zzzz", # 1E95A..1E95D ; Unknown + "Adlm", # 1E95E..1E95F ; Adlam + "Zzzz", # 1E960..1EC70 ; Unknown + "Zyyy", # 1EC71..1ECB4 ; Common + "Zzzz", # 1ECB5..1ED00 ; Unknown + "Zyyy", # 1ED01..1ED3D ; Common + "Zzzz", # 1ED3E..1EDFF ; Unknown + "Arab", # 1EE00..1EE03 ; Arabic + "Zzzz", # 1EE04..1EE04 ; Unknown + "Arab", # 1EE05..1EE1F ; Arabic + "Zzzz", # 1EE20..1EE20 ; Unknown + "Arab", # 1EE21..1EE22 ; Arabic + "Zzzz", # 1EE23..1EE23 ; Unknown + "Arab", # 1EE24..1EE24 ; Arabic + "Zzzz", # 1EE25..1EE26 ; Unknown + "Arab", # 1EE27..1EE27 ; Arabic + "Zzzz", # 1EE28..1EE28 ; Unknown + "Arab", # 1EE29..1EE32 ; Arabic + "Zzzz", # 1EE33..1EE33 ; Unknown + "Arab", # 1EE34..1EE37 ; Arabic + "Zzzz", # 1EE38..1EE38 ; Unknown + "Arab", # 1EE39..1EE39 ; Arabic + "Zzzz", # 1EE3A..1EE3A ; Unknown + "Arab", # 1EE3B..1EE3B ; Arabic + "Zzzz", # 1EE3C..1EE41 ; Unknown + "Arab", # 1EE42..1EE42 ; Arabic + "Zzzz", # 1EE43..1EE46 ; Unknown + "Arab", # 1EE47..1EE47 ; Arabic + 
"Zzzz", # 1EE48..1EE48 ; Unknown + "Arab", # 1EE49..1EE49 ; Arabic + "Zzzz", # 1EE4A..1EE4A ; Unknown + "Arab", # 1EE4B..1EE4B ; Arabic + "Zzzz", # 1EE4C..1EE4C ; Unknown + "Arab", # 1EE4D..1EE4F ; Arabic + "Zzzz", # 1EE50..1EE50 ; Unknown + "Arab", # 1EE51..1EE52 ; Arabic + "Zzzz", # 1EE53..1EE53 ; Unknown + "Arab", # 1EE54..1EE54 ; Arabic + "Zzzz", # 1EE55..1EE56 ; Unknown + "Arab", # 1EE57..1EE57 ; Arabic + "Zzzz", # 1EE58..1EE58 ; Unknown + "Arab", # 1EE59..1EE59 ; Arabic + "Zzzz", # 1EE5A..1EE5A ; Unknown + "Arab", # 1EE5B..1EE5B ; Arabic + "Zzzz", # 1EE5C..1EE5C ; Unknown + "Arab", # 1EE5D..1EE5D ; Arabic + "Zzzz", # 1EE5E..1EE5E ; Unknown + "Arab", # 1EE5F..1EE5F ; Arabic + "Zzzz", # 1EE60..1EE60 ; Unknown + "Arab", # 1EE61..1EE62 ; Arabic + "Zzzz", # 1EE63..1EE63 ; Unknown + "Arab", # 1EE64..1EE64 ; Arabic + "Zzzz", # 1EE65..1EE66 ; Unknown + "Arab", # 1EE67..1EE6A ; Arabic + "Zzzz", # 1EE6B..1EE6B ; Unknown + "Arab", # 1EE6C..1EE72 ; Arabic + "Zzzz", # 1EE73..1EE73 ; Unknown + "Arab", # 1EE74..1EE77 ; Arabic + "Zzzz", # 1EE78..1EE78 ; Unknown + "Arab", # 1EE79..1EE7C ; Arabic + "Zzzz", # 1EE7D..1EE7D ; Unknown + "Arab", # 1EE7E..1EE7E ; Arabic + "Zzzz", # 1EE7F..1EE7F ; Unknown + "Arab", # 1EE80..1EE89 ; Arabic + "Zzzz", # 1EE8A..1EE8A ; Unknown + "Arab", # 1EE8B..1EE9B ; Arabic + "Zzzz", # 1EE9C..1EEA0 ; Unknown + "Arab", # 1EEA1..1EEA3 ; Arabic + "Zzzz", # 1EEA4..1EEA4 ; Unknown + "Arab", # 1EEA5..1EEA9 ; Arabic + "Zzzz", # 1EEAA..1EEAA ; Unknown + "Arab", # 1EEAB..1EEBB ; Arabic + "Zzzz", # 1EEBC..1EEEF ; Unknown + "Arab", # 1EEF0..1EEF1 ; Arabic + "Zzzz", # 1EEF2..1EFFF ; Unknown + "Zyyy", # 1F000..1F02B ; Common + "Zzzz", # 1F02C..1F02F ; Unknown + "Zyyy", # 1F030..1F093 ; Common + "Zzzz", # 1F094..1F09F ; Unknown + "Zyyy", # 1F0A0..1F0AE ; Common + "Zzzz", # 1F0AF..1F0B0 ; Unknown + "Zyyy", # 1F0B1..1F0BF ; Common + "Zzzz", # 1F0C0..1F0C0 ; Unknown + "Zyyy", # 1F0C1..1F0CF ; Common + "Zzzz", # 1F0D0..1F0D0 ; Unknown + "Zyyy", # 1F0D1..1F0F5 ; Common 
+ "Zzzz", # 1F0F6..1F0FF ; Unknown + "Zyyy", # 1F100..1F1AD ; Common + "Zzzz", # 1F1AE..1F1E5 ; Unknown + "Zyyy", # 1F1E6..1F1FF ; Common + "Hira", # 1F200..1F200 ; Hiragana + "Zyyy", # 1F201..1F202 ; Common + "Zzzz", # 1F203..1F20F ; Unknown + "Zyyy", # 1F210..1F23B ; Common + "Zzzz", # 1F23C..1F23F ; Unknown + "Zyyy", # 1F240..1F248 ; Common + "Zzzz", # 1F249..1F24F ; Unknown + "Zyyy", # 1F250..1F251 ; Common + "Zzzz", # 1F252..1F25F ; Unknown + "Zyyy", # 1F260..1F265 ; Common + "Zzzz", # 1F266..1F2FF ; Unknown + "Zyyy", # 1F300..1F6D7 ; Common + "Zzzz", # 1F6D8..1F6DB ; Unknown + "Zyyy", # 1F6DC..1F6EC ; Common + "Zzzz", # 1F6ED..1F6EF ; Unknown + "Zyyy", # 1F6F0..1F6FC ; Common + "Zzzz", # 1F6FD..1F6FF ; Unknown + "Zyyy", # 1F700..1F776 ; Common + "Zzzz", # 1F777..1F77A ; Unknown + "Zyyy", # 1F77B..1F7D9 ; Common + "Zzzz", # 1F7DA..1F7DF ; Unknown + "Zyyy", # 1F7E0..1F7EB ; Common + "Zzzz", # 1F7EC..1F7EF ; Unknown + "Zyyy", # 1F7F0..1F7F0 ; Common + "Zzzz", # 1F7F1..1F7FF ; Unknown + "Zyyy", # 1F800..1F80B ; Common + "Zzzz", # 1F80C..1F80F ; Unknown + "Zyyy", # 1F810..1F847 ; Common + "Zzzz", # 1F848..1F84F ; Unknown + "Zyyy", # 1F850..1F859 ; Common + "Zzzz", # 1F85A..1F85F ; Unknown + "Zyyy", # 1F860..1F887 ; Common + "Zzzz", # 1F888..1F88F ; Unknown + "Zyyy", # 1F890..1F8AD ; Common + "Zzzz", # 1F8AE..1F8AF ; Unknown + "Zyyy", # 1F8B0..1F8BB ; Common + "Zzzz", # 1F8BC..1F8BF ; Unknown + "Zyyy", # 1F8C0..1F8C1 ; Common + "Zzzz", # 1F8C2..1F8FF ; Unknown + "Zyyy", # 1F900..1FA53 ; Common + "Zzzz", # 1FA54..1FA5F ; Unknown + "Zyyy", # 1FA60..1FA6D ; Common + "Zzzz", # 1FA6E..1FA6F ; Unknown + "Zyyy", # 1FA70..1FA7C ; Common + "Zzzz", # 1FA7D..1FA7F ; Unknown + "Zyyy", # 1FA80..1FA89 ; Common + "Zzzz", # 1FA8A..1FA8E ; Unknown + "Zyyy", # 1FA8F..1FAC6 ; Common + "Zzzz", # 1FAC7..1FACD ; Unknown + "Zyyy", # 1FACE..1FADC ; Common + "Zzzz", # 1FADD..1FADE ; Unknown + "Zyyy", # 1FADF..1FAE9 ; Common + "Zzzz", # 1FAEA..1FAEF ; Unknown + "Zyyy", # 1FAF0..1FAF8 ; 
Common + "Zzzz", # 1FAF9..1FAFF ; Unknown + "Zyyy", # 1FB00..1FB92 ; Common + "Zzzz", # 1FB93..1FB93 ; Unknown + "Zyyy", # 1FB94..1FBF9 ; Common + "Zzzz", # 1FBFA..1FFFF ; Unknown + "Hani", # 20000..2A6DF ; Han + "Zzzz", # 2A6E0..2A6FF ; Unknown + "Hani", # 2A700..2B739 ; Han + "Zzzz", # 2B73A..2B73F ; Unknown + "Hani", # 2B740..2B81D ; Han + "Zzzz", # 2B81E..2B81F ; Unknown + "Hani", # 2B820..2CEA1 ; Han + "Zzzz", # 2CEA2..2CEAF ; Unknown + "Hani", # 2CEB0..2EBE0 ; Han + "Zzzz", # 2EBE1..2EBEF ; Unknown + "Hani", # 2EBF0..2EE5D ; Han + "Zzzz", # 2EE5E..2F7FF ; Unknown + "Hani", # 2F800..2FA1D ; Han + "Zzzz", # 2FA1E..2FFFF ; Unknown + "Hani", # 30000..3134A ; Han + "Zzzz", # 3134B..3134F ; Unknown + "Hani", # 31350..323AF ; Han + "Zzzz", # 323B0..E0000 ; Unknown + "Zyyy", # E0001..E0001 ; Common + "Zzzz", # E0002..E001F ; Unknown + "Zyyy", # E0020..E007F ; Common + "Zzzz", # E0080..E00FF ; Unknown + "Zinh", # E0100..E01EF ; Inherited + "Zzzz", # E01F0..10FFFF ; Unknown +] + +NAMES = { + "Adlm": "Adlam", + "Aghb": "Caucasian_Albanian", + "Ahom": "Ahom", + "Arab": "Arabic", + "Armi": "Imperial_Aramaic", + "Armn": "Armenian", + "Avst": "Avestan", + "Bali": "Balinese", + "Bamu": "Bamum", + "Bass": "Bassa_Vah", + "Batk": "Batak", + "Beng": "Bengali", + "Bhks": "Bhaiksuki", + "Bopo": "Bopomofo", + "Brah": "Brahmi", + "Brai": "Braille", + "Bugi": "Buginese", + "Buhd": "Buhid", + "Cakm": "Chakma", + "Cans": "Canadian_Aboriginal", + "Cari": "Carian", + "Cham": "Cham", + "Cher": "Cherokee", + "Chrs": "Chorasmian", + "Copt": "Coptic", + "Cpmn": "Cypro_Minoan", + "Cprt": "Cypriot", + "Cyrl": "Cyrillic", + "Deva": "Devanagari", + "Diak": "Dives_Akuru", + "Dogr": "Dogra", + "Dsrt": "Deseret", + "Dupl": "Duployan", + "Egyp": "Egyptian_Hieroglyphs", + "Elba": "Elbasan", + "Elym": "Elymaic", + "Ethi": "Ethiopic", + "Gara": "Garay", + "Geor": "Georgian", + "Glag": "Glagolitic", + "Gong": "Gunjala_Gondi", + "Gonm": "Masaram_Gondi", + "Goth": "Gothic", + "Gran": "Grantha", + "Grek": 
"Greek", + "Gujr": "Gujarati", + "Gukh": "Gurung_Khema", + "Guru": "Gurmukhi", + "Hang": "Hangul", + "Hani": "Han", + "Hano": "Hanunoo", + "Hatr": "Hatran", + "Hebr": "Hebrew", + "Hira": "Hiragana", + "Hluw": "Anatolian_Hieroglyphs", + "Hmng": "Pahawh_Hmong", + "Hmnp": "Nyiakeng_Puachue_Hmong", + "Hrkt": "Katakana_Or_Hiragana", + "Hung": "Old_Hungarian", + "Ital": "Old_Italic", + "Java": "Javanese", + "Kali": "Kayah_Li", + "Kana": "Katakana", + "Kawi": "Kawi", + "Khar": "Kharoshthi", + "Khmr": "Khmer", + "Khoj": "Khojki", + "Kits": "Khitan_Small_Script", + "Knda": "Kannada", + "Krai": "Kirat_Rai", + "Kthi": "Kaithi", + "Lana": "Tai_Tham", + "Laoo": "Lao", + "Latn": "Latin", + "Lepc": "Lepcha", + "Limb": "Limbu", + "Lina": "Linear_A", + "Linb": "Linear_B", + "Lisu": "Lisu", + "Lyci": "Lycian", + "Lydi": "Lydian", + "Mahj": "Mahajani", + "Maka": "Makasar", + "Mand": "Mandaic", + "Mani": "Manichaean", + "Marc": "Marchen", + "Medf": "Medefaidrin", + "Mend": "Mende_Kikakui", + "Merc": "Meroitic_Cursive", + "Mero": "Meroitic_Hieroglyphs", + "Mlym": "Malayalam", + "Modi": "Modi", + "Mong": "Mongolian", + "Mroo": "Mro", + "Mtei": "Meetei_Mayek", + "Mult": "Multani", + "Mymr": "Myanmar", + "Nagm": "Nag_Mundari", + "Nand": "Nandinagari", + "Narb": "Old_North_Arabian", + "Nbat": "Nabataean", + "Newa": "Newa", + "Nkoo": "Nko", + "Nshu": "Nushu", + "Ogam": "Ogham", + "Olck": "Ol_Chiki", + "Onao": "Ol_Onal", + "Orkh": "Old_Turkic", + "Orya": "Oriya", + "Osge": "Osage", + "Osma": "Osmanya", + "Ougr": "Old_Uyghur", + "Palm": "Palmyrene", + "Pauc": "Pau_Cin_Hau", + "Perm": "Old_Permic", + "Phag": "Phags_Pa", + "Phli": "Inscriptional_Pahlavi", + "Phlp": "Psalter_Pahlavi", + "Phnx": "Phoenician", + "Plrd": "Miao", + "Prti": "Inscriptional_Parthian", + "Rjng": "Rejang", + "Rohg": "Hanifi_Rohingya", + "Runr": "Runic", + "Samr": "Samaritan", + "Sarb": "Old_South_Arabian", + "Saur": "Saurashtra", + "Sgnw": "SignWriting", + "Shaw": "Shavian", + "Shrd": "Sharada", + "Sidd": "Siddham", + 
"Sind": "Khudawadi", + "Sinh": "Sinhala", + "Sogd": "Sogdian", + "Sogo": "Old_Sogdian", + "Sora": "Sora_Sompeng", + "Soyo": "Soyombo", + "Sund": "Sundanese", + "Sunu": "Sunuwar", + "Sylo": "Syloti_Nagri", + "Syrc": "Syriac", + "Tagb": "Tagbanwa", + "Takr": "Takri", + "Tale": "Tai_Le", + "Talu": "New_Tai_Lue", + "Taml": "Tamil", + "Tang": "Tangut", + "Tavt": "Tai_Viet", + "Telu": "Telugu", + "Tfng": "Tifinagh", + "Tglg": "Tagalog", + "Thaa": "Thaana", + "Thai": "Thai", + "Tibt": "Tibetan", + "Tirh": "Tirhuta", + "Tnsa": "Tangsa", + "Todr": "Todhri", + "Toto": "Toto", + "Tutg": "Tulu_Tigalari", + "Ugar": "Ugaritic", + "Vaii": "Vai", + "Vith": "Vithkuqi", + "Wara": "Warang_Citi", + "Wcho": "Wancho", + "Xpeo": "Old_Persian", + "Xsux": "Cuneiform", + "Yezi": "Yezidi", + "Yiii": "Yi", + "Zanb": "Zanabazar_Square", + "Zinh": "Inherited", + "Zyyy": "Common", + "Zzzz": "Unknown", +} diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/builder.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..456c34c4dd903d1ba7704c46bfb8e2af212cd0a4 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/builder.py @@ -0,0 +1,215 @@ +from fontTools import ttLib +from fontTools.ttLib.tables import otTables as ot + +# VariationStore + + +def buildVarRegionAxis(axisSupport): + self = ot.VarRegionAxis() + self.StartCoord, self.PeakCoord, self.EndCoord = [float(v) for v in axisSupport] + return self + + +def buildSparseVarRegionAxis(axisIndex, axisSupport): + self = ot.SparseVarRegionAxis() + self.AxisIndex = axisIndex + self.StartCoord, self.PeakCoord, self.EndCoord = [float(v) for v in axisSupport] + return self + + +def buildVarRegion(support, axisTags): + assert all(tag in axisTags for tag in support.keys()), ( + "Unknown axis tag found.", + support, + axisTags, + ) + self = ot.VarRegion() + self.VarRegionAxis = [] + for tag in axisTags: + 
self.VarRegionAxis.append(buildVarRegionAxis(support.get(tag, (0, 0, 0)))) + return self + + +def buildSparseVarRegion(support, axisTags): + assert all(tag in axisTags for tag in support.keys()), ( + "Unknown axis tag found.", + support, + axisTags, + ) + self = ot.SparseVarRegion() + self.SparseVarRegionAxis = [] + for i, tag in enumerate(axisTags): + if tag not in support: + continue + self.SparseVarRegionAxis.append( + buildSparseVarRegionAxis(i, support.get(tag, (0, 0, 0))) + ) + self.SparseRegionCount = len(self.SparseVarRegionAxis) + return self + + +def buildVarRegionList(supports, axisTags): + self = ot.VarRegionList() + self.RegionAxisCount = len(axisTags) + self.Region = [] + for support in supports: + self.Region.append(buildVarRegion(support, axisTags)) + self.RegionCount = len(self.Region) + return self + + +def buildSparseVarRegionList(supports, axisTags): + self = ot.SparseVarRegionList() + self.RegionAxisCount = len(axisTags) + self.Region = [] + for support in supports: + self.Region.append(buildSparseVarRegion(support, axisTags)) + self.RegionCount = len(self.Region) + return self + + +def _reorderItem(lst, mapping): + return [lst[i] for i in mapping] + + +def VarData_calculateNumShorts(self, optimize=False): + count = self.VarRegionCount + items = self.Item + bit_lengths = [0] * count + for item in items: + # The "+ (i < -1)" magic is to handle two's-compliment. + # That is, we want to get back 7 for -128, whereas + # bit_length() returns 8. Similarly for -65536. + # The reason "i < -1" is used instead of "i < 0" is that + # the latter would make it return 0 for "-1" instead of 1. + bl = [(i + (i < -1)).bit_length() for i in item] + bit_lengths = [max(*pair) for pair in zip(bl, bit_lengths)] + # The addition of 8, instead of seven, is to account for the sign bit. 
+ # This "((b + 8) >> 3) if b else 0" when combined with the above + # "(i + (i < -1)).bit_length()" is a faster way to compute byte-lengths + # conforming to: + # + # byte_length = (0 if i == 0 else + # 1 if -128 <= i < 128 else + # 2 if -65536 <= i < 65536 else + # ...) + byte_lengths = [((b + 8) >> 3) if b else 0 for b in bit_lengths] + + # https://github.com/fonttools/fonttools/issues/2279 + longWords = any(b > 2 for b in byte_lengths) + + if optimize: + # Reorder columns such that wider columns come before narrower columns + mapping = [] + mapping.extend(i for i, b in enumerate(byte_lengths) if b > 2) + mapping.extend(i for i, b in enumerate(byte_lengths) if b == 2) + mapping.extend(i for i, b in enumerate(byte_lengths) if b == 1) + + byte_lengths = _reorderItem(byte_lengths, mapping) + self.VarRegionIndex = _reorderItem(self.VarRegionIndex, mapping) + self.VarRegionCount = len(self.VarRegionIndex) + for i in range(len(items)): + items[i] = _reorderItem(items[i], mapping) + + if longWords: + self.NumShorts = ( + max((i for i, b in enumerate(byte_lengths) if b > 2), default=-1) + 1 + ) + self.NumShorts |= 0x8000 + else: + self.NumShorts = ( + max((i for i, b in enumerate(byte_lengths) if b > 1), default=-1) + 1 + ) + + self.VarRegionCount = len(self.VarRegionIndex) + return self + + +ot.VarData.calculateNumShorts = VarData_calculateNumShorts + + +def VarData_CalculateNumShorts(self, optimize=True): + """Deprecated name for VarData_calculateNumShorts() which + defaults to optimize=True. 
def VarData_optimize(self):
    """Optimize the VarData in place (reorder columns) and return it."""
    return VarData_calculateNumShorts(self, optimize=True)


def buildVarData(varRegionIndices, items, optimize=True):
    """Build an ot.VarData from region indices and per-item delta rows.

    Each row in *items* must have exactly one delta per region index.
    """
    data = ot.VarData()
    data.VarRegionIndex = list(varRegionIndices)
    regionCount = data.VarRegionCount = len(data.VarRegionIndex)
    data.Item = []
    for row in items or ():
        assert len(row) == regionCount
        data.Item.append(list(row))
    data.ItemCount = len(data.Item)
    data.calculateNumShorts(optimize=optimize)
    return data


def buildVarStore(varRegionList, varDataList):
    """Assemble an ot.VarStore (Format 1) from its region list and VarData subtables."""
    store = ot.VarStore()
    store.Format = 1
    store.VarRegionList = varRegionList
    store.VarData = list(varDataList)
    store.VarDataCount = len(store.VarData)
    return store


def buildMultiVarData(varRegionIndices, items):
    """Build an ot.MultiVarData (Format 1); unlike VarData, no short/long packing."""
    data = ot.MultiVarData()
    data.Format = 1
    data.VarRegionIndex = list(varRegionIndices)
    regionCount = data.VarRegionCount = len(data.VarRegionIndex)
    data.Item = []
    for row in items or ():
        assert len(row) == regionCount
        data.Item.append(list(row))
    data.ItemCount = len(data.Item)
    return data


def buildMultiVarStore(varRegionList, multiVarDataList):
    """Assemble an ot.MultiVarStore (Format 1) from a sparse region list and MultiVarData."""
    store = ot.MultiVarStore()
    store.Format = 1
    store.SparseVarRegionList = varRegionList
    store.MultiVarData = list(multiVarDataList)
    store.MultiVarDataCount = len(store.MultiVarData)
    return store


# Variation helpers


def buildVarIdxMap(varIdxes, glyphOrder):
    """Build an ot.VarIdxMap pairing glyphs (in glyph order) with variation indices."""
    idxMap = ot.VarIdxMap()
    idxMap.mapping = dict(zip(glyphOrder, varIdxes))
    return idxMap


def buildDeltaSetIndexMap(varIdxes):
    """Build an ot.DeltaSetIndexMap, or None when the mapping is the identity."""
    indices = list(varIdxes)
    if all(position == value for position, value in enumerate(indices)):
        # Identity mapping needs no table at all.
        return None
    idxMap = ot.DeltaSetIndexMap()
    idxMap.mapping = indices
    # Format 1 is only required once entries no longer fit a 16-bit count.
    idxMap.Format = 1 if len(indices) > 0xFFFF else 0
    return idxMap
ot.Device() + self.DeltaFormat = 0x8000 + self.StartSize = varIdx >> 16 + self.EndSize = varIdx & 0xFFFF + return self diff --git a/evalkit_tf437/lib/python3.10/site-packages/fontTools/voltLib/voltToFea.py b/evalkit_tf437/lib/python3.10/site-packages/fontTools/voltLib/voltToFea.py new file mode 100644 index 0000000000000000000000000000000000000000..c77d5ad11118e5ad4c82650d21e4038faf0200c8 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/fontTools/voltLib/voltToFea.py @@ -0,0 +1,730 @@ +"""\ +MS VOLT ``.vtp`` to AFDKO ``.fea`` OpenType Layout converter. + +Usage +----- + +To convert a VTP project file: + + +.. code-block:: sh + + $ fonttools voltLib.voltToFea input.vtp output.fea + +It is also possible convert font files with `TSIV` table (as saved from Volt), +in this case the glyph names used in the Volt project will be mapped to the +actual glyph names in the font files when written to the feature file: + +.. code-block:: sh + + $ fonttools voltLib.voltToFea input.ttf output.fea + +The ``--quiet`` option can be used to suppress warnings. + +The ``--traceback`` can be used to get Python traceback in case of exceptions, +instead of suppressing the traceback. + + +Limitations +----------- + +* Not all VOLT features are supported, the script will error if it it + encounters something it does not understand. Please report an issue if this + happens. +* AFDKO feature file syntax for mark positioning is awkward and does not allow + setting the mark coverage. It also defines mark anchors globally, as a result + some mark positioning lookups might cover many marks than what was in the VOLT + file. This should not be an issue in practice, but if it is then the only way + is to modify the VOLT file or the generated feature file manually to use unique + mark anchors for each lookup. 
# For sorting voltLib.ast.GlyphDefinition, see its use below.
class Group:
    """Sort key for VOLT group definitions.

    Orders a group *after* any group it references, so that feature-file
    class definitions appear before the definitions that use them.
    Groups with no dependency relation compare as unordered (False both ways),
    which `sorted` (a stable sort) leaves in their original order.
    """

    def __init__(self, group):
        self.name = group.name.lower()
        self.groups = [
            x.group.lower() for x in group.enum.enum if isinstance(x, VAst.GroupName)
        ]

    def __lt__(self, other):
        if self.name in other.groups:
            return True
        if other.name in self.groups:
            return False
        if self.groups and not other.groups:
            return False
        if not self.groups and other.groups:
            return True
        # Fix: previously fell through and returned None, which is not a valid
        # rich-comparison result; sorted() only relied on its falsiness.
        return False
while res in self._lookup_names.values(): + res += "_" + self._lookup_names[name] = res + return self._lookup_names[name] + + def _className(self, name): + if name not in self._class_names: + res = self._NOT_CLASS_NAME_RE.sub("_", name) + while res in self._class_names.values(): + res += "_" + self._class_names[name] = res + return self._class_names[name] + + def _collectStatements(self, doc, tables): + # Collect and sort group definitions first, to make sure a group + # definition that references other groups comes after them since VOLT + # does not enforce such ordering, and feature file require it. + groups = [s for s in doc.statements if isinstance(s, VAst.GroupDefinition)] + for statement in sorted(groups, key=lambda x: Group(x)): + self._groupDefinition(statement) + + for statement in doc.statements: + if isinstance(statement, VAst.GlyphDefinition): + self._glyphDefinition(statement) + elif isinstance(statement, VAst.AnchorDefinition): + if "GPOS" in tables: + self._anchorDefinition(statement) + elif isinstance(statement, VAst.SettingDefinition): + self._settingDefinition(statement) + elif isinstance(statement, VAst.GroupDefinition): + pass # Handled above + elif isinstance(statement, VAst.ScriptDefinition): + self._scriptDefinition(statement) + elif not isinstance(statement, VAst.LookupDefinition): + raise NotImplementedError(statement) + + # Lookup definitions need to be handled last as they reference glyph + # and mark classes that might be defined after them. 
+ for statement in doc.statements: + if isinstance(statement, VAst.LookupDefinition): + if statement.pos and "GPOS" not in tables: + continue + if statement.sub and "GSUB" not in tables: + continue + self._lookupDefinition(statement) + + def _buildFeatureFile(self, tables): + doc = ast.FeatureFile() + statements = doc.statements + + if self._glyphclasses: + statements.append(ast.Comment("# Glyph classes")) + statements.extend(self._glyphclasses.values()) + + if self._markclasses: + statements.append(ast.Comment("\n# Mark classes")) + statements.extend(c[1] for c in sorted(self._markclasses.items())) + + if self._lookups: + statements.append(ast.Comment("\n# Lookups")) + for lookup in self._lookups.values(): + statements.extend(getattr(lookup, "targets", [])) + statements.append(lookup) + + # Prune features + features = self._features.copy() + for ftag in features: + scripts = features[ftag] + for stag in scripts: + langs = scripts[stag] + for ltag in langs: + langs[ltag] = [l for l in langs[ltag] if l.lower() in self._lookups] + scripts[stag] = {t: l for t, l in langs.items() if l} + features[ftag] = {t: s for t, s in scripts.items() if s} + features = {t: f for t, f in features.items() if f} + + if features: + statements.append(ast.Comment("# Features")) + for ftag, scripts in features.items(): + feature = ast.FeatureBlock(ftag) + stags = sorted(scripts, key=lambda k: 0 if k == "DFLT" else 1) + for stag in stags: + feature.statements.append(ast.ScriptStatement(stag)) + ltags = sorted(scripts[stag], key=lambda k: 0 if k == "dflt" else 1) + for ltag in ltags: + include_default = True if ltag == "dflt" else False + feature.statements.append( + ast.LanguageStatement(ltag, include_default=include_default) + ) + for name in scripts[stag][ltag]: + lookup = self._lookups[name.lower()] + lookupref = ast.LookupReferenceStatement(lookup) + feature.statements.append(lookupref) + statements.append(feature) + + if self._gdef and "GDEF" in tables: + classes = [] + for name in 
("BASE", "MARK", "LIGATURE", "COMPONENT"): + if name in self._gdef: + classname = "GDEF_" + name.lower() + glyphclass = ast.GlyphClassDefinition(classname, self._gdef[name]) + statements.append(glyphclass) + classes.append(ast.GlyphClassName(glyphclass)) + else: + classes.append(None) + + gdef = ast.TableBlock("GDEF") + gdef.statements.append(ast.GlyphClassDefStatement(*classes)) + statements.append(gdef) + + return doc + + def convert(self, tables=None): + doc = VoltParser(self._file_or_path).parse() + + if tables is None: + tables = TABLES + if self._font is not None: + self._glyph_order = self._font.getGlyphOrder() + + self._collectStatements(doc, tables) + fea = self._buildFeatureFile(tables) + return fea.asFea() + + def _glyphName(self, glyph): + try: + name = glyph.glyph + except AttributeError: + name = glyph + return ast.GlyphName(self._glyph_map.get(name, name)) + + def _groupName(self, group): + try: + name = group.group + except AttributeError: + name = group + return ast.GlyphClassName(self._glyphclasses[name.lower()]) + + def _coverage(self, coverage): + items = [] + for item in coverage: + if isinstance(item, VAst.GlyphName): + items.append(self._glyphName(item)) + elif isinstance(item, VAst.GroupName): + items.append(self._groupName(item)) + elif isinstance(item, VAst.Enum): + items.append(self._enum(item)) + elif isinstance(item, VAst.Range): + items.append((item.start, item.end)) + else: + raise NotImplementedError(item) + return items + + def _enum(self, enum): + return ast.GlyphClass(self._coverage(enum.enum)) + + def _context(self, context): + out = [] + for item in context: + coverage = self._coverage(item) + if not isinstance(coverage, (tuple, list)): + coverage = [coverage] + out.extend(coverage) + return out + + def _groupDefinition(self, group): + name = self._className(group.name) + glyphs = self._enum(group.enum) + glyphclass = ast.GlyphClassDefinition(name, glyphs) + + self._glyphclasses[group.name.lower()] = glyphclass + + def 
_glyphDefinition(self, glyph): + try: + self._glyph_map[glyph.name] = self._glyph_order[glyph.id] + except TypeError: + pass + + if glyph.type in ("BASE", "MARK", "LIGATURE", "COMPONENT"): + if glyph.type not in self._gdef: + self._gdef[glyph.type] = ast.GlyphClass() + self._gdef[glyph.type].glyphs.append(self._glyphName(glyph.name)) + + if glyph.type == "MARK": + self._marks.add(glyph.name) + elif glyph.type == "LIGATURE": + self._ligatures[glyph.name] = glyph.components + + def _scriptDefinition(self, script): + stag = script.tag + for lang in script.langs: + ltag = lang.tag + for feature in lang.features: + lookups = {l.split("\\")[0]: True for l in feature.lookups} + ftag = feature.tag + if ftag not in self._features: + self._features[ftag] = {} + if stag not in self._features[ftag]: + self._features[ftag][stag] = {} + assert ltag not in self._features[ftag][stag] + self._features[ftag][stag][ltag] = lookups.keys() + + def _settingDefinition(self, setting): + if setting.name.startswith("COMPILER_"): + self._settings[setting.name] = setting.value + else: + log.warning(f"Unsupported setting ignored: {setting.name}") + + def _adjustment(self, adjustment): + adv, dx, dy, adv_adjust_by, dx_adjust_by, dy_adjust_by = adjustment + + adv_device = adv_adjust_by and adv_adjust_by.items() or None + dx_device = dx_adjust_by and dx_adjust_by.items() or None + dy_device = dy_adjust_by and dy_adjust_by.items() or None + + return ast.ValueRecord( + xPlacement=dx, + yPlacement=dy, + xAdvance=adv, + xPlaDevice=dx_device, + yPlaDevice=dy_device, + xAdvDevice=adv_device, + ) + + def _anchor(self, adjustment): + adv, dx, dy, adv_adjust_by, dx_adjust_by, dy_adjust_by = adjustment + + assert not adv_adjust_by + dx_device = dx_adjust_by and dx_adjust_by.items() or None + dy_device = dy_adjust_by and dy_adjust_by.items() or None + + return ast.Anchor( + dx or 0, + dy or 0, + xDeviceTable=dx_device or None, + yDeviceTable=dy_device or None, + ) + + def _anchorDefinition(self, anchordef): 
+ anchorname = anchordef.name + glyphname = anchordef.glyph_name + anchor = self._anchor(anchordef.pos) + + if anchorname.startswith("MARK_"): + name = "_".join(anchorname.split("_")[1:]) + markclass = ast.MarkClass(self._className(name)) + glyph = self._glyphName(glyphname) + markdef = MarkClassDefinition(markclass, anchor, glyph) + self._markclasses[(glyphname, anchorname)] = markdef + else: + if glyphname not in self._anchors: + self._anchors[glyphname] = {} + if anchorname not in self._anchors[glyphname]: + self._anchors[glyphname][anchorname] = {} + self._anchors[glyphname][anchorname][anchordef.component] = anchor + + def _gposLookup(self, lookup, fealookup): + statements = fealookup.statements + + pos = lookup.pos + if isinstance(pos, VAst.PositionAdjustPairDefinition): + for (idx1, idx2), (pos1, pos2) in pos.adjust_pair.items(): + coverage_1 = pos.coverages_1[idx1 - 1] + coverage_2 = pos.coverages_2[idx2 - 1] + + # If not both are groups, use “enum pos” otherwise makeotf will + # fail. + enumerated = False + for item in coverage_1 + coverage_2: + if not isinstance(item, VAst.GroupName): + enumerated = True + + glyphs1 = self._coverage(coverage_1) + glyphs2 = self._coverage(coverage_2) + record1 = self._adjustment(pos1) + record2 = self._adjustment(pos2) + assert len(glyphs1) == 1 + assert len(glyphs2) == 1 + statements.append( + ast.PairPosStatement( + glyphs1[0], record1, glyphs2[0], record2, enumerated=enumerated + ) + ) + elif isinstance(pos, VAst.PositionAdjustSingleDefinition): + for a, b in pos.adjust_single: + glyphs = self._coverage(a) + record = self._adjustment(b) + assert len(glyphs) == 1 + statements.append( + ast.SinglePosStatement([(glyphs[0], record)], [], [], False) + ) + elif isinstance(pos, VAst.PositionAttachDefinition): + anchors = {} + for marks, classname in pos.coverage_to: + for mark in marks: + # Set actually used mark classes. 
Basically a hack to get + # around the feature file syntax limitation of making mark + # classes global and not allowing mark positioning to + # specify mark coverage. + for name in mark.glyphSet(): + key = (name, "MARK_" + classname) + self._markclasses[key].used = True + markclass = ast.MarkClass(self._className(classname)) + for base in pos.coverage: + for name in base.glyphSet(): + if name not in anchors: + anchors[name] = [] + if classname not in anchors[name]: + anchors[name].append(classname) + + for name in anchors: + components = 1 + if name in self._ligatures: + components = self._ligatures[name] + + marks = [] + for mark in anchors[name]: + markclass = ast.MarkClass(self._className(mark)) + for component in range(1, components + 1): + if len(marks) < component: + marks.append([]) + anchor = None + if component in self._anchors[name][mark]: + anchor = self._anchors[name][mark][component] + marks[component - 1].append((anchor, markclass)) + + base = self._glyphName(name) + if name in self._marks: + mark = ast.MarkMarkPosStatement(base, marks[0]) + elif name in self._ligatures: + mark = ast.MarkLigPosStatement(base, marks) + else: + mark = ast.MarkBasePosStatement(base, marks[0]) + statements.append(mark) + elif isinstance(pos, VAst.PositionAttachCursiveDefinition): + # Collect enter and exit glyphs + enter_coverage = [] + for coverage in pos.coverages_enter: + for base in coverage: + for name in base.glyphSet(): + enter_coverage.append(name) + exit_coverage = [] + for coverage in pos.coverages_exit: + for base in coverage: + for name in base.glyphSet(): + exit_coverage.append(name) + + # Write enter anchors, also check if the glyph has exit anchor and + # write it, too. 
+ for name in enter_coverage: + glyph = self._glyphName(name) + entry = self._anchors[name]["entry"][1] + exit = None + if name in exit_coverage: + exit = self._anchors[name]["exit"][1] + exit_coverage.pop(exit_coverage.index(name)) + statements.append(ast.CursivePosStatement(glyph, entry, exit)) + + # Write any remaining exit anchors. + for name in exit_coverage: + glyph = self._glyphName(name) + exit = self._anchors[name]["exit"][1] + statements.append(ast.CursivePosStatement(glyph, None, exit)) + else: + raise NotImplementedError(pos) + + def _gposContextLookup( + self, lookup, prefix, suffix, ignore, fealookup, targetlookup + ): + statements = fealookup.statements + + assert not lookup.reversal + + pos = lookup.pos + if isinstance(pos, VAst.PositionAdjustPairDefinition): + for (idx1, idx2), (pos1, pos2) in pos.adjust_pair.items(): + glyphs1 = self._coverage(pos.coverages_1[idx1 - 1]) + glyphs2 = self._coverage(pos.coverages_2[idx2 - 1]) + assert len(glyphs1) == 1 + assert len(glyphs2) == 1 + glyphs = (glyphs1[0], glyphs2[0]) + + if ignore: + statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)]) + else: + lookups = (targetlookup, targetlookup) + statement = ast.ChainContextPosStatement( + prefix, glyphs, suffix, lookups + ) + statements.append(statement) + elif isinstance(pos, VAst.PositionAdjustSingleDefinition): + glyphs = [ast.GlyphClass()] + for a, b in pos.adjust_single: + glyph = self._coverage(a) + glyphs[0].extend(glyph) + + if ignore: + statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)]) + else: + statement = ast.ChainContextPosStatement( + prefix, glyphs, suffix, [targetlookup] + ) + statements.append(statement) + elif isinstance(pos, VAst.PositionAttachDefinition): + glyphs = [ast.GlyphClass()] + for coverage, _ in pos.coverage_to: + glyphs[0].extend(self._coverage(coverage)) + + if ignore: + statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)]) + else: + statement = ast.ChainContextPosStatement( + prefix, glyphs, suffix, 
[targetlookup] + ) + statements.append(statement) + else: + raise NotImplementedError(pos) + + def _gsubLookup(self, lookup, prefix, suffix, ignore, chain, fealookup): + statements = fealookup.statements + + sub = lookup.sub + for key, val in sub.mapping.items(): + if not key or not val: + path, line, column = sub.location + log.warning(f"{path}:{line}:{column}: Ignoring empty substitution") + continue + statement = None + glyphs = self._coverage(key) + replacements = self._coverage(val) + if ignore: + chain_context = (prefix, glyphs, suffix) + statement = ast.IgnoreSubstStatement([chain_context]) + elif isinstance(sub, VAst.SubstitutionSingleDefinition): + assert len(glyphs) == 1 + assert len(replacements) == 1 + statement = ast.SingleSubstStatement( + glyphs, replacements, prefix, suffix, chain + ) + elif isinstance(sub, VAst.SubstitutionReverseChainingSingleDefinition): + assert len(glyphs) == 1 + assert len(replacements) == 1 + statement = ast.ReverseChainSingleSubstStatement( + prefix, suffix, glyphs, replacements + ) + elif isinstance(sub, VAst.SubstitutionMultipleDefinition): + assert len(glyphs) == 1 + statement = ast.MultipleSubstStatement( + prefix, glyphs[0], suffix, replacements, chain + ) + elif isinstance(sub, VAst.SubstitutionLigatureDefinition): + assert len(replacements) == 1 + statement = ast.LigatureSubstStatement( + prefix, glyphs, suffix, replacements[0], chain + ) + else: + raise NotImplementedError(sub) + statements.append(statement) + + def _lookupDefinition(self, lookup): + mark_attachement = None + mark_filtering = None + + flags = 0 + if lookup.direction == "RTL": + flags |= 1 + if not lookup.process_base: + flags |= 2 + # FIXME: Does VOLT support this? 
+ # if not lookup.process_ligatures: + # flags |= 4 + if not lookup.process_marks: + flags |= 8 + elif isinstance(lookup.process_marks, str): + mark_attachement = self._groupName(lookup.process_marks) + elif lookup.mark_glyph_set is not None: + mark_filtering = self._groupName(lookup.mark_glyph_set) + + lookupflags = None + if flags or mark_attachement is not None or mark_filtering is not None: + lookupflags = ast.LookupFlagStatement( + flags, mark_attachement, mark_filtering + ) + if "\\" in lookup.name: + # Merge sub lookups as subtables (lookups named “base\sub”), + # makeotf/feaLib will issue a warning and ignore the subtable + # statement if it is not a pairpos lookup, though. + name = lookup.name.split("\\")[0] + if name.lower() not in self._lookups: + fealookup = ast.LookupBlock(self._lookupName(name)) + if lookupflags is not None: + fealookup.statements.append(lookupflags) + fealookup.statements.append(ast.Comment("# " + lookup.name)) + else: + fealookup = self._lookups[name.lower()] + fealookup.statements.append(ast.SubtableStatement()) + fealookup.statements.append(ast.Comment("# " + lookup.name)) + self._lookups[name.lower()] = fealookup + else: + fealookup = ast.LookupBlock(self._lookupName(lookup.name)) + if lookupflags is not None: + fealookup.statements.append(lookupflags) + self._lookups[lookup.name.lower()] = fealookup + + if lookup.comments is not None: + fealookup.statements.append(ast.Comment("# " + lookup.comments)) + + contexts = [] + if lookup.context: + for context in lookup.context: + prefix = self._context(context.left) + suffix = self._context(context.right) + ignore = context.ex_or_in == "EXCEPT_CONTEXT" + contexts.append([prefix, suffix, ignore, False]) + # It seems that VOLT will create contextual substitution using + # only the input if there is no other contexts in this lookup. 
def main(args=None):
    """Convert MS VOLT to AFDKO feature files."""

    import argparse
    from pathlib import Path

    from fontTools import configLogger

    parser = argparse.ArgumentParser(
        "fonttools voltLib.voltToFea", description=main.__doc__
    )
    parser.add_argument(
        "input", metavar="INPUT", type=Path, help="input font/VTP file to process"
    )
    parser.add_argument(
        "featurefile", metavar="OUTPUT", type=Path, help="output feature file"
    )
    parser.add_argument(
        "-t",
        "--table",
        action="append",
        choices=TABLES,
        dest="tables",
        help="List of tables to write, by default all tables are written",
    )
    parser.add_argument(
        "-q", "--quiet", action="store_true", help="Suppress non-error messages"
    )
    parser.add_argument(
        "--traceback", action="store_true", help="Don’t catch exceptions"
    )

    options = parser.parse_args(args)

    configLogger(level=("ERROR" if options.quiet else "INFO"))

    file_or_path = options.input
    font = None
    try:
        font = TTFont(file_or_path)
        if "TSIV" in font:
            file_or_path = StringIO(font["TSIV"].data.decode("utf-8"))
        else:
            log.error('"TSIV" table is missing, font was not saved from VOLT?')
            return 1
    except TTLibError:
        # Not a font file; assume a plain VTP project file.
        pass

    converter = VoltToFea(file_or_path, font)
    try:
        fea = converter.convert(options.tables)
    except NotImplementedError as e:
        if options.traceback:
            raise
        location = getattr(e.args[0], "location", None)
        message = f'"{e}" is not supported'
        if location:
            path, line, column = location
            log.error(f"{path}:{line}:{column}: {message}")
        else:
            log.error(message)
        return 1
    # Fix: write explicitly as UTF-8 — feature files may contain non-ASCII
    # glyph names or comments, and the platform default encoding (e.g. cp1252
    # on Windows) would corrupt or reject them.
    with open(options.featurefile, "w", encoding="utf-8") as feafile:
        feafile.write(fea)
    return 0


if __name__ == "__main__":
    import sys

    sys.exit(main())
a/evalkit_tf437/lib/python3.10/site-packages/gradio/templates/node/build/client/_app/immutable/chunks/index.Mc7UGGd2.js.gz b/evalkit_tf437/lib/python3.10/site-packages/gradio/templates/node/build/client/_app/immutable/chunks/index.Mc7UGGd2.js.gz new file mode 100644 index 0000000000000000000000000000000000000000..edf7b06a16aacc7defb5a4429913b227ac98e816 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/gradio/templates/node/build/client/_app/immutable/chunks/index.Mc7UGGd2.js.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eefef885bc3a4f02c7bc6b6ad5a27a3d5bb3f39dc489435dcd164b59c3d588e7 +size 13942