Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- evalkit_tf437/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/METADATA +93 -0
- evalkit_tf437/lib/python3.10/site-packages/fastapi-0.103.2.dist-info/REQUESTED +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fastapi-0.103.2.dist-info/licenses/LICENSE +21 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/etree.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/psCharStrings.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/testTools.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/plistlib/__init__.py +681 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/instancer/__pycache__/__main__.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/interpolatable.py +1148 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/interpolatableHelpers.py +396 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/interpolatablePlot.py +1269 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/plot.py +238 -0
- evalkit_tf437/lib/python3.10/site-packages/google_crc32c/__config__.py +38 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/console.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/filter.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/plugin.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/regexopt.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/style.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/token.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/unistring.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_asy_builtins.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_php_builtins.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_scheme_builtins.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_sourcemod_builtins.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_stan_builtins.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_usd_builtins.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/archetype.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/arrow.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/asc.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/automation.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/compiled.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/d.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/diff.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/elpi.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/felix.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/inferno.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/installers.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/jvm.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/maxima.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/ml.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/mojo.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/nix.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/oberon.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/openscad.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/perl.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/php.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/pony.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/prolog.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/scdoc.cpython-310.pyc +0 -0
evalkit_tf437/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/METADATA
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: MarkupSafe
|
| 3 |
+
Version: 2.1.5
|
| 4 |
+
Summary: Safely add untrusted strings to HTML/XML markup.
|
| 5 |
+
Home-page: https://palletsprojects.com/p/markupsafe/
|
| 6 |
+
Maintainer: Pallets
|
| 7 |
+
Maintainer-email: contact@palletsprojects.com
|
| 8 |
+
License: BSD-3-Clause
|
| 9 |
+
Project-URL: Donate, https://palletsprojects.com/donate
|
| 10 |
+
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
|
| 11 |
+
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
|
| 12 |
+
Project-URL: Source Code, https://github.com/pallets/markupsafe/
|
| 13 |
+
Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
|
| 14 |
+
Project-URL: Chat, https://discord.gg/pallets
|
| 15 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 16 |
+
Classifier: Environment :: Web Environment
|
| 17 |
+
Classifier: Intended Audience :: Developers
|
| 18 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 19 |
+
Classifier: Operating System :: OS Independent
|
| 20 |
+
Classifier: Programming Language :: Python
|
| 21 |
+
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
| 22 |
+
Classifier: Topic :: Text Processing :: Markup :: HTML
|
| 23 |
+
Requires-Python: >=3.7
|
| 24 |
+
Description-Content-Type: text/x-rst
|
| 25 |
+
License-File: LICENSE.rst
|
| 26 |
+
|
| 27 |
+
MarkupSafe
|
| 28 |
+
==========
|
| 29 |
+
|
| 30 |
+
MarkupSafe implements a text object that escapes characters so it is
|
| 31 |
+
safe to use in HTML and XML. Characters that have special meanings are
|
| 32 |
+
replaced so that they display as the actual characters. This mitigates
|
| 33 |
+
injection attacks, meaning untrusted user input can safely be displayed
|
| 34 |
+
on a page.
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
Installing
|
| 38 |
+
----------
|
| 39 |
+
|
| 40 |
+
Install and update using `pip`_:
|
| 41 |
+
|
| 42 |
+
.. code-block:: text
|
| 43 |
+
|
| 44 |
+
pip install -U MarkupSafe
|
| 45 |
+
|
| 46 |
+
.. _pip: https://pip.pypa.io/en/stable/getting-started/
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
Examples
|
| 50 |
+
--------
|
| 51 |
+
|
| 52 |
+
.. code-block:: pycon
|
| 53 |
+
|
| 54 |
+
>>> from markupsafe import Markup, escape
|
| 55 |
+
|
| 56 |
+
>>> # escape replaces special characters and wraps in Markup
|
| 57 |
+
>>> escape("<script>alert(document.cookie);</script>")
|
| 58 |
+
Markup('<script>alert(document.cookie);</script>')
|
| 59 |
+
|
| 60 |
+
>>> # wrap in Markup to mark text "safe" and prevent escaping
|
| 61 |
+
>>> Markup("<strong>Hello</strong>")
|
| 62 |
+
Markup('<strong>hello</strong>')
|
| 63 |
+
|
| 64 |
+
>>> escape(Markup("<strong>Hello</strong>"))
|
| 65 |
+
Markup('<strong>hello</strong>')
|
| 66 |
+
|
| 67 |
+
>>> # Markup is a str subclass
|
| 68 |
+
>>> # methods and operators escape their arguments
|
| 69 |
+
>>> template = Markup("Hello <em>{name}</em>")
|
| 70 |
+
>>> template.format(name='"World"')
|
| 71 |
+
Markup('Hello <em>"World"</em>')
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
Donate
|
| 75 |
+
------
|
| 76 |
+
|
| 77 |
+
The Pallets organization develops and supports MarkupSafe and other
|
| 78 |
+
popular packages. In order to grow the community of contributors and
|
| 79 |
+
users, and allow the maintainers to devote more time to the projects,
|
| 80 |
+
`please donate today`_.
|
| 81 |
+
|
| 82 |
+
.. _please donate today: https://palletsprojects.com/donate
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
Links
|
| 86 |
+
-----
|
| 87 |
+
|
| 88 |
+
- Documentation: https://markupsafe.palletsprojects.com/
|
| 89 |
+
- Changes: https://markupsafe.palletsprojects.com/changes/
|
| 90 |
+
- PyPI Releases: https://pypi.org/project/MarkupSafe/
|
| 91 |
+
- Source Code: https://github.com/pallets/markupsafe/
|
| 92 |
+
- Issue Tracker: https://github.com/pallets/markupsafe/issues/
|
| 93 |
+
- Chat: https://discord.gg/pallets
|
evalkit_tf437/lib/python3.10/site-packages/fastapi-0.103.2.dist-info/REQUESTED
ADDED
|
File without changes
|
evalkit_tf437/lib/python3.10/site-packages/fastapi-0.103.2.dist-info/licenses/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
The MIT License (MIT)
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2018 Sebastián Ramírez
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in
|
| 13 |
+
all copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
| 21 |
+
THE SOFTWARE.
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/etree.cpython-310.pyc
ADDED
|
Binary file (9.82 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/psCharStrings.cpython-310.pyc
ADDED
|
Binary file (41.5 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/testTools.cpython-310.pyc
ADDED
|
Binary file (9.41 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/plistlib/__init__.py
ADDED
|
@@ -0,0 +1,681 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections.abc
|
| 2 |
+
import re
|
| 3 |
+
from typing import (
|
| 4 |
+
Any,
|
| 5 |
+
Callable,
|
| 6 |
+
Dict,
|
| 7 |
+
List,
|
| 8 |
+
Mapping,
|
| 9 |
+
MutableMapping,
|
| 10 |
+
Optional,
|
| 11 |
+
Sequence,
|
| 12 |
+
Type,
|
| 13 |
+
Union,
|
| 14 |
+
IO,
|
| 15 |
+
)
|
| 16 |
+
import warnings
|
| 17 |
+
from io import BytesIO
|
| 18 |
+
from datetime import datetime
|
| 19 |
+
from base64 import b64encode, b64decode
|
| 20 |
+
from numbers import Integral
|
| 21 |
+
from types import SimpleNamespace
|
| 22 |
+
from functools import singledispatch
|
| 23 |
+
|
| 24 |
+
from fontTools.misc import etree
|
| 25 |
+
|
| 26 |
+
from fontTools.misc.textTools import tostr
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
# By default, we
|
| 30 |
+
# - deserialize <data> elements as bytes and
|
| 31 |
+
# - serialize bytes as <data> elements.
|
| 32 |
+
# Before, on Python 2, we
|
| 33 |
+
# - deserialized <data> elements as plistlib.Data objects, in order to
|
| 34 |
+
# distinguish them from the built-in str type (which is bytes on python2)
|
| 35 |
+
# - serialized bytes as <string> elements (they must have only contained
|
| 36 |
+
# ASCII characters in this case)
|
| 37 |
+
# You can pass use_builtin_types=[True|False] to the load/dump etc. functions
|
| 38 |
+
# to enforce a specific treatment.
|
| 39 |
+
# NOTE that unicode type always maps to <string> element, and plistlib.Data
|
| 40 |
+
# always maps to <data> element, regardless of use_builtin_types.
|
| 41 |
+
USE_BUILTIN_TYPES = True
|
| 42 |
+
|
| 43 |
+
XML_DECLARATION = b"""<?xml version='1.0' encoding='UTF-8'?>"""
|
| 44 |
+
|
| 45 |
+
PLIST_DOCTYPE = (
|
| 46 |
+
b'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" '
|
| 47 |
+
b'"http://www.apple.com/DTDs/PropertyList-1.0.dtd">'
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
# Date should conform to a subset of ISO 8601:
|
| 52 |
+
# YYYY '-' MM '-' DD 'T' HH ':' MM ':' SS 'Z'
|
| 53 |
+
_date_parser = re.compile(
|
| 54 |
+
r"(?P<year>\d\d\d\d)"
|
| 55 |
+
r"(?:-(?P<month>\d\d)"
|
| 56 |
+
r"(?:-(?P<day>\d\d)"
|
| 57 |
+
r"(?:T(?P<hour>\d\d)"
|
| 58 |
+
r"(?::(?P<minute>\d\d)"
|
| 59 |
+
r"(?::(?P<second>\d\d))"
|
| 60 |
+
r"?)?)?)?)?Z",
|
| 61 |
+
re.ASCII,
|
| 62 |
+
)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def _date_from_string(s: str) -> datetime:
|
| 66 |
+
order = ("year", "month", "day", "hour", "minute", "second")
|
| 67 |
+
m = _date_parser.match(s)
|
| 68 |
+
if m is None:
|
| 69 |
+
raise ValueError(f"Expected ISO 8601 date string, but got '{s:r}'.")
|
| 70 |
+
gd = m.groupdict()
|
| 71 |
+
lst = []
|
| 72 |
+
for key in order:
|
| 73 |
+
val = gd[key]
|
| 74 |
+
if val is None:
|
| 75 |
+
break
|
| 76 |
+
lst.append(int(val))
|
| 77 |
+
# NOTE: mypy doesn't know that lst is 6 elements long.
|
| 78 |
+
return datetime(*lst) # type:ignore
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def _date_to_string(d: datetime) -> str:
|
| 82 |
+
return "%04d-%02d-%02dT%02d:%02d:%02dZ" % (
|
| 83 |
+
d.year,
|
| 84 |
+
d.month,
|
| 85 |
+
d.day,
|
| 86 |
+
d.hour,
|
| 87 |
+
d.minute,
|
| 88 |
+
d.second,
|
| 89 |
+
)
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class Data:
|
| 93 |
+
"""Represents binary data when ``use_builtin_types=False.``
|
| 94 |
+
|
| 95 |
+
This class wraps binary data loaded from a plist file when the
|
| 96 |
+
``use_builtin_types`` argument to the loading function (:py:func:`fromtree`,
|
| 97 |
+
:py:func:`load`, :py:func:`loads`) is false.
|
| 98 |
+
|
| 99 |
+
The actual binary data is retrieved using the ``data`` attribute.
|
| 100 |
+
"""
|
| 101 |
+
|
| 102 |
+
def __init__(self, data: bytes) -> None:
|
| 103 |
+
if not isinstance(data, bytes):
|
| 104 |
+
raise TypeError("Expected bytes, found %s" % type(data).__name__)
|
| 105 |
+
self.data = data
|
| 106 |
+
|
| 107 |
+
@classmethod
|
| 108 |
+
def fromBase64(cls, data: Union[bytes, str]) -> "Data":
|
| 109 |
+
return cls(b64decode(data))
|
| 110 |
+
|
| 111 |
+
def asBase64(self, maxlinelength: int = 76, indent_level: int = 1) -> bytes:
|
| 112 |
+
return _encode_base64(
|
| 113 |
+
self.data, maxlinelength=maxlinelength, indent_level=indent_level
|
| 114 |
+
)
|
| 115 |
+
|
| 116 |
+
def __eq__(self, other: Any) -> bool:
|
| 117 |
+
if isinstance(other, self.__class__):
|
| 118 |
+
return self.data == other.data
|
| 119 |
+
elif isinstance(other, bytes):
|
| 120 |
+
return self.data == other
|
| 121 |
+
else:
|
| 122 |
+
return NotImplemented
|
| 123 |
+
|
| 124 |
+
def __repr__(self) -> str:
|
| 125 |
+
return "%s(%s)" % (self.__class__.__name__, repr(self.data))
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def _encode_base64(
|
| 129 |
+
data: bytes, maxlinelength: Optional[int] = 76, indent_level: int = 1
|
| 130 |
+
) -> bytes:
|
| 131 |
+
data = b64encode(data)
|
| 132 |
+
if data and maxlinelength:
|
| 133 |
+
# split into multiple lines right-justified to 'maxlinelength' chars
|
| 134 |
+
indent = b"\n" + b" " * indent_level
|
| 135 |
+
max_length = max(16, maxlinelength - len(indent))
|
| 136 |
+
chunks = []
|
| 137 |
+
for i in range(0, len(data), max_length):
|
| 138 |
+
chunks.append(indent)
|
| 139 |
+
chunks.append(data[i : i + max_length])
|
| 140 |
+
chunks.append(indent)
|
| 141 |
+
data = b"".join(chunks)
|
| 142 |
+
return data
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
# Mypy does not support recursive type aliases as of 0.782, Pylance does.
|
| 146 |
+
# https://github.com/python/mypy/issues/731
|
| 147 |
+
# https://devblogs.microsoft.com/python/pylance-introduces-five-new-features-that-enable-type-magic-for-python-developers/#1-support-for-recursive-type-aliases
|
| 148 |
+
PlistEncodable = Union[
|
| 149 |
+
bool,
|
| 150 |
+
bytes,
|
| 151 |
+
Data,
|
| 152 |
+
datetime,
|
| 153 |
+
float,
|
| 154 |
+
Integral,
|
| 155 |
+
Mapping[str, Any],
|
| 156 |
+
Sequence[Any],
|
| 157 |
+
str,
|
| 158 |
+
]
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
class PlistTarget:
|
| 162 |
+
"""Event handler using the ElementTree Target API that can be
|
| 163 |
+
passed to a XMLParser to produce property list objects from XML.
|
| 164 |
+
It is based on the CPython plistlib module's _PlistParser class,
|
| 165 |
+
but does not use the expat parser.
|
| 166 |
+
|
| 167 |
+
>>> from fontTools.misc import etree
|
| 168 |
+
>>> parser = etree.XMLParser(target=PlistTarget())
|
| 169 |
+
>>> result = etree.XML(
|
| 170 |
+
... "<dict>"
|
| 171 |
+
... " <key>something</key>"
|
| 172 |
+
... " <string>blah</string>"
|
| 173 |
+
... "</dict>",
|
| 174 |
+
... parser=parser)
|
| 175 |
+
>>> result == {"something": "blah"}
|
| 176 |
+
True
|
| 177 |
+
|
| 178 |
+
Links:
|
| 179 |
+
https://github.com/python/cpython/blob/main/Lib/plistlib.py
|
| 180 |
+
http://lxml.de/parsing.html#the-target-parser-interface
|
| 181 |
+
"""
|
| 182 |
+
|
| 183 |
+
def __init__(
|
| 184 |
+
self,
|
| 185 |
+
use_builtin_types: Optional[bool] = None,
|
| 186 |
+
dict_type: Type[MutableMapping[str, Any]] = dict,
|
| 187 |
+
) -> None:
|
| 188 |
+
self.stack: List[PlistEncodable] = []
|
| 189 |
+
self.current_key: Optional[str] = None
|
| 190 |
+
self.root: Optional[PlistEncodable] = None
|
| 191 |
+
if use_builtin_types is None:
|
| 192 |
+
self._use_builtin_types = USE_BUILTIN_TYPES
|
| 193 |
+
else:
|
| 194 |
+
if use_builtin_types is False:
|
| 195 |
+
warnings.warn(
|
| 196 |
+
"Setting use_builtin_types to False is deprecated and will be "
|
| 197 |
+
"removed soon.",
|
| 198 |
+
DeprecationWarning,
|
| 199 |
+
)
|
| 200 |
+
self._use_builtin_types = use_builtin_types
|
| 201 |
+
self._dict_type = dict_type
|
| 202 |
+
|
| 203 |
+
def start(self, tag: str, attrib: Mapping[str, str]) -> None:
|
| 204 |
+
self._data: List[str] = []
|
| 205 |
+
handler = _TARGET_START_HANDLERS.get(tag)
|
| 206 |
+
if handler is not None:
|
| 207 |
+
handler(self)
|
| 208 |
+
|
| 209 |
+
def end(self, tag: str) -> None:
|
| 210 |
+
handler = _TARGET_END_HANDLERS.get(tag)
|
| 211 |
+
if handler is not None:
|
| 212 |
+
handler(self)
|
| 213 |
+
|
| 214 |
+
def data(self, data: str) -> None:
|
| 215 |
+
self._data.append(data)
|
| 216 |
+
|
| 217 |
+
def close(self) -> PlistEncodable:
|
| 218 |
+
if self.root is None:
|
| 219 |
+
raise ValueError("No root set.")
|
| 220 |
+
return self.root
|
| 221 |
+
|
| 222 |
+
# helpers
|
| 223 |
+
|
| 224 |
+
def add_object(self, value: PlistEncodable) -> None:
|
| 225 |
+
if self.current_key is not None:
|
| 226 |
+
stack_top = self.stack[-1]
|
| 227 |
+
if not isinstance(stack_top, collections.abc.MutableMapping):
|
| 228 |
+
raise ValueError("unexpected element: %r" % stack_top)
|
| 229 |
+
stack_top[self.current_key] = value
|
| 230 |
+
self.current_key = None
|
| 231 |
+
elif not self.stack:
|
| 232 |
+
# this is the root object
|
| 233 |
+
self.root = value
|
| 234 |
+
else:
|
| 235 |
+
stack_top = self.stack[-1]
|
| 236 |
+
if not isinstance(stack_top, list):
|
| 237 |
+
raise ValueError("unexpected element: %r" % stack_top)
|
| 238 |
+
stack_top.append(value)
|
| 239 |
+
|
| 240 |
+
def get_data(self) -> str:
|
| 241 |
+
data = "".join(self._data)
|
| 242 |
+
self._data = []
|
| 243 |
+
return data
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
# event handlers
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
def start_dict(self: PlistTarget) -> None:
|
| 250 |
+
d = self._dict_type()
|
| 251 |
+
self.add_object(d)
|
| 252 |
+
self.stack.append(d)
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
def end_dict(self: PlistTarget) -> None:
|
| 256 |
+
if self.current_key:
|
| 257 |
+
raise ValueError("missing value for key '%s'" % self.current_key)
|
| 258 |
+
self.stack.pop()
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
def end_key(self: PlistTarget) -> None:
|
| 262 |
+
if self.current_key or not isinstance(self.stack[-1], collections.abc.Mapping):
|
| 263 |
+
raise ValueError("unexpected key")
|
| 264 |
+
self.current_key = self.get_data()
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
def start_array(self: PlistTarget) -> None:
|
| 268 |
+
a: List[PlistEncodable] = []
|
| 269 |
+
self.add_object(a)
|
| 270 |
+
self.stack.append(a)
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
def end_array(self: PlistTarget) -> None:
|
| 274 |
+
self.stack.pop()
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
def end_true(self: PlistTarget) -> None:
|
| 278 |
+
self.add_object(True)
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def end_false(self: PlistTarget) -> None:
|
| 282 |
+
self.add_object(False)
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
def end_integer(self: PlistTarget) -> None:
|
| 286 |
+
self.add_object(int(self.get_data()))
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
def end_real(self: PlistTarget) -> None:
|
| 290 |
+
self.add_object(float(self.get_data()))
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
def end_string(self: PlistTarget) -> None:
|
| 294 |
+
self.add_object(self.get_data())
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
def end_data(self: PlistTarget) -> None:
|
| 298 |
+
if self._use_builtin_types:
|
| 299 |
+
self.add_object(b64decode(self.get_data()))
|
| 300 |
+
else:
|
| 301 |
+
self.add_object(Data.fromBase64(self.get_data()))
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
def end_date(self: PlistTarget) -> None:
|
| 305 |
+
self.add_object(_date_from_string(self.get_data()))
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
_TARGET_START_HANDLERS: Dict[str, Callable[[PlistTarget], None]] = {
|
| 309 |
+
"dict": start_dict,
|
| 310 |
+
"array": start_array,
|
| 311 |
+
}
|
| 312 |
+
|
| 313 |
+
_TARGET_END_HANDLERS: Dict[str, Callable[[PlistTarget], None]] = {
|
| 314 |
+
"dict": end_dict,
|
| 315 |
+
"array": end_array,
|
| 316 |
+
"key": end_key,
|
| 317 |
+
"true": end_true,
|
| 318 |
+
"false": end_false,
|
| 319 |
+
"integer": end_integer,
|
| 320 |
+
"real": end_real,
|
| 321 |
+
"string": end_string,
|
| 322 |
+
"data": end_data,
|
| 323 |
+
"date": end_date,
|
| 324 |
+
}
|
| 325 |
+
|
| 326 |
+
|
| 327 |
+
# functions to build element tree from plist data
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
def _string_element(value: str, ctx: SimpleNamespace) -> etree.Element:
|
| 331 |
+
el = etree.Element("string")
|
| 332 |
+
el.text = value
|
| 333 |
+
return el
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
def _bool_element(value: bool, ctx: SimpleNamespace) -> etree.Element:
|
| 337 |
+
if value:
|
| 338 |
+
return etree.Element("true")
|
| 339 |
+
return etree.Element("false")
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
def _integer_element(value: int, ctx: SimpleNamespace) -> etree.Element:
|
| 343 |
+
if -1 << 63 <= value < 1 << 64:
|
| 344 |
+
el = etree.Element("integer")
|
| 345 |
+
el.text = "%d" % value
|
| 346 |
+
return el
|
| 347 |
+
raise OverflowError(value)
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
def _real_element(value: float, ctx: SimpleNamespace) -> etree.Element:
|
| 351 |
+
el = etree.Element("real")
|
| 352 |
+
el.text = repr(value)
|
| 353 |
+
return el
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
def _dict_element(
|
| 357 |
+
d: Mapping[str, PlistEncodable], ctx: SimpleNamespace
|
| 358 |
+
) -> etree.Element:
|
| 359 |
+
el = etree.Element("dict")
|
| 360 |
+
items = d.items()
|
| 361 |
+
if ctx.sort_keys:
|
| 362 |
+
items = sorted(items) # type: ignore
|
| 363 |
+
ctx.indent_level += 1
|
| 364 |
+
for key, value in items:
|
| 365 |
+
if not isinstance(key, str):
|
| 366 |
+
if ctx.skipkeys:
|
| 367 |
+
continue
|
| 368 |
+
raise TypeError("keys must be strings")
|
| 369 |
+
k = etree.SubElement(el, "key")
|
| 370 |
+
k.text = tostr(key, "utf-8")
|
| 371 |
+
el.append(_make_element(value, ctx))
|
| 372 |
+
ctx.indent_level -= 1
|
| 373 |
+
return el
|
| 374 |
+
|
| 375 |
+
|
| 376 |
+
def _array_element(
|
| 377 |
+
array: Sequence[PlistEncodable], ctx: SimpleNamespace
|
| 378 |
+
) -> etree.Element:
|
| 379 |
+
el = etree.Element("array")
|
| 380 |
+
if len(array) == 0:
|
| 381 |
+
return el
|
| 382 |
+
ctx.indent_level += 1
|
| 383 |
+
for value in array:
|
| 384 |
+
el.append(_make_element(value, ctx))
|
| 385 |
+
ctx.indent_level -= 1
|
| 386 |
+
return el
|
| 387 |
+
|
| 388 |
+
|
| 389 |
+
def _date_element(date: datetime, ctx: SimpleNamespace) -> etree.Element:
|
| 390 |
+
el = etree.Element("date")
|
| 391 |
+
el.text = _date_to_string(date)
|
| 392 |
+
return el
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
def _data_element(data: bytes, ctx: SimpleNamespace) -> etree.Element:
|
| 396 |
+
el = etree.Element("data")
|
| 397 |
+
# NOTE: mypy is confused about whether el.text should be str or bytes.
|
| 398 |
+
el.text = _encode_base64( # type: ignore
|
| 399 |
+
data,
|
| 400 |
+
maxlinelength=(76 if ctx.pretty_print else None),
|
| 401 |
+
indent_level=ctx.indent_level,
|
| 402 |
+
)
|
| 403 |
+
return el
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
def _string_or_data_element(raw_bytes: bytes, ctx: SimpleNamespace) -> etree.Element:
|
| 407 |
+
if ctx.use_builtin_types:
|
| 408 |
+
return _data_element(raw_bytes, ctx)
|
| 409 |
+
else:
|
| 410 |
+
try:
|
| 411 |
+
string = raw_bytes.decode(encoding="ascii", errors="strict")
|
| 412 |
+
except UnicodeDecodeError:
|
| 413 |
+
raise ValueError(
|
| 414 |
+
"invalid non-ASCII bytes; use unicode string instead: %r" % raw_bytes
|
| 415 |
+
)
|
| 416 |
+
return _string_element(string, ctx)
|
| 417 |
+
|
| 418 |
+
|
| 419 |
+
# The following is probably not entirely correct. The signature should take `Any`
# and return `NoReturn`. At the time of this writing, neither mypy nor Pyright
# can deal with singledispatch properly and will apply the signature of the base
# function to all others. Being slightly dishonest makes it type-check and return
# usable typing information for the optimistic case.
@singledispatch
def _make_element(value: PlistEncodable, ctx: SimpleNamespace) -> etree.Element:
    # Base case: reached only when no registered overload matches the
    # runtime type of `value`.
    raise TypeError("unsupported type: %s" % type(value))


# Register one serializer per supported Python type. Dispatch follows the
# MRO of the value's runtime type (e.g. bool is matched before Integral).
_make_element.register(str)(_string_element)
_make_element.register(bool)(_bool_element)
_make_element.register(Integral)(_integer_element)
_make_element.register(float)(_real_element)
_make_element.register(collections.abc.Mapping)(_dict_element)
_make_element.register(list)(_array_element)
_make_element.register(tuple)(_array_element)
_make_element.register(datetime)(_date_element)
# bytes honor the use_builtin_types setting; bytearray/Data are always <data>.
_make_element.register(bytes)(_string_or_data_element)
_make_element.register(bytearray)(_data_element)
_make_element.register(Data)(lambda v, ctx: _data_element(v.data, ctx))
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
# Public functions to create element tree from plist-compatible python
|
| 443 |
+
# data structures and viceversa, for use when (de)serializing GLIF xml.
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
def totree(
    value: PlistEncodable,
    sort_keys: bool = True,
    skipkeys: bool = False,
    use_builtin_types: Optional[bool] = None,
    pretty_print: bool = True,
    indent_level: int = 1,
) -> etree.Element:
    """Convert a value derived from a plist into an XML tree.

    Args:
        value: Any kind of value to be serialized to XML.
        sort_keys: Whether keys of dictionaries should be sorted.
        skipkeys (bool): Whether to silently skip non-string dictionary
            keys.
        use_builtin_types (bool): If true, byte strings will be
            encoded in Base-64 and wrapped in a ``data`` tag; if
            false, they will be either stored as ASCII strings or an
            exception raised if they cannot be decoded as such. Defaults
            to ``True`` if not present. Deprecated.
        pretty_print (bool): Whether to indent the output.
        indent_level (int): Level of indentation when serializing.

    Returns: an ``etree`` ``Element`` object.

    Raises:
        ``TypeError``
            if non-string dictionary keys are serialized
            and ``skipkeys`` is false.
        ``ValueError``
            if non-ASCII binary data is present
            and `use_builtin_types` is false.
    """
    if use_builtin_types is None:
        # Fall back to the module-level default when the (deprecated)
        # parameter was not supplied.  (The original code also had a no-op
        # `else` branch re-assigning the same value; removed.)
        use_builtin_types = USE_BUILTIN_TYPES
    context = SimpleNamespace(
        sort_keys=sort_keys,
        skipkeys=skipkeys,
        use_builtin_types=use_builtin_types,
        pretty_print=pretty_print,
        indent_level=indent_level,
    )
    return _make_element(value, context)
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
def fromtree(
    tree: etree.Element,
    use_builtin_types: Optional[bool] = None,
    dict_type: Type[MutableMapping[str, Any]] = dict,
) -> Any:
    """Convert an XML tree to a plist structure.

    Args:
        tree: An ``etree`` ``Element``.
        use_builtin_types: If True, binary data is deserialized to
            bytes strings. If False, it is wrapped in :py:class:`Data`
            objects. Defaults to True if not provided. Deprecated.
        dict_type: What type to use for dictionaries.

    Returns: An object (usually a dictionary).
    """
    target = PlistTarget(use_builtin_types=use_builtin_types, dict_type=dict_type)
    # Drive the parser target by replaying start/end events over the tree.
    for event, node in etree.iterwalk(tree, events=("start", "end")):
        if event == "start":
            target.start(node.tag, node.attrib)
        elif event == "end":
            # Leaf elements carry the textual payload; feed it to the
            # target before closing the element (always str, never None).
            if not len(node):
                target.data(node.text or "")
            target.end(node.tag)
    return target.close()
|
| 520 |
+
|
| 521 |
+
|
| 522 |
+
# python3 plistlib API
|
| 523 |
+
|
| 524 |
+
|
| 525 |
+
def load(
    fp: IO[bytes],
    use_builtin_types: Optional[bool] = None,
    dict_type: Type[MutableMapping[str, Any]] = dict,
) -> Any:
    """Load a plist file into an object.

    Args:
        fp: An opened file.
        use_builtin_types: If True, binary data is deserialized to
            bytes strings. If False, it is wrapped in :py:class:`Data`
            objects. Defaults to True if not provided. Deprecated.
        dict_type: What type to use for dictionaries.

    Returns:
        An object (usually a dictionary) representing the top level of
        the plist file.
    """
    if not hasattr(fp, "read"):
        raise AttributeError("'%s' object has no attribute 'read'" % type(fp).__name__)
    target = PlistTarget(use_builtin_types=use_builtin_types, dict_type=dict_type)
    parser = etree.XMLParser(target=target)
    parsed = etree.parse(fp, parser=parser)
    # lxml returns the target object directly, while ElementTree wraps
    # it as the root of an ElementTree object.
    if hasattr(parsed, "getroot"):
        return parsed.getroot()
    return parsed
|
| 555 |
+
|
| 556 |
+
|
| 557 |
+
def loads(
    value: bytes,
    use_builtin_types: Optional[bool] = None,
    dict_type: Type[MutableMapping[str, Any]] = dict,
) -> Any:
    """Load a plist file from a string into an object.

    Args:
        value: A bytes string containing a plist.
        use_builtin_types: If True, binary data is deserialized to
            bytes strings. If False, it is wrapped in :py:class:`Data`
            objects. Defaults to True if not provided. Deprecated.
        dict_type: What type to use for dictionaries.

    Returns:
        An object (usually a dictionary) representing the top level of
        the plist file.
    """
    # Wrap the bytes in an in-memory binary file and delegate to load().
    return load(BytesIO(value), use_builtin_types=use_builtin_types, dict_type=dict_type)
|
| 578 |
+
|
| 579 |
+
|
| 580 |
+
def dump(
    value: PlistEncodable,
    fp: IO[bytes],
    sort_keys: bool = True,
    skipkeys: bool = False,
    use_builtin_types: Optional[bool] = None,
    pretty_print: bool = True,
) -> None:
    """Write a Python object to a plist file.

    Args:
        value: An object to write.
        fp: A file opened for writing.
        sort_keys (bool): Whether keys of dictionaries should be sorted.
        skipkeys (bool): Whether to silently skip non-string dictionary
            keys.
        use_builtin_types (bool): If true, byte strings will be
            encoded in Base-64 and wrapped in a ``data`` tag; if
            false, they will be either stored as ASCII strings or an
            exception raised if they cannot be represented. Defaults
            to ``True`` if not present. Deprecated.
        pretty_print (bool): Whether to indent the output.

    Raises:
        ``TypeError``
            if non-string dictionary keys are serialized
            and ``skipkeys`` is false.
        ``ValueError``
            if non-representable binary data is present
            and `use_builtin_types` is false.
    """

    if not hasattr(fp, "write"):
        raise AttributeError("'%s' object has no attribute 'write'" % type(fp).__name__)
    root = etree.Element("plist", version="1.0")
    el = totree(
        value,
        sort_keys=sort_keys,
        skipkeys=skipkeys,
        use_builtin_types=use_builtin_types,
        pretty_print=pretty_print,
    )
    root.append(el)
    tree = etree.ElementTree(root)
    # we write the doctype ourselves instead of using the 'doctype' argument
    # of 'write' method, because lxml will force adding a '\n' even when
    # pretty_print is False.
    if pretty_print:
        header = b"\n".join((XML_DECLARATION, PLIST_DOCTYPE, b""))
    else:
        header = XML_DECLARATION + PLIST_DOCTYPE
    fp.write(header)
    tree.write(  # type: ignore
        fp,
        encoding="utf-8",
        pretty_print=pretty_print,
        xml_declaration=False,
    )
|
| 638 |
+
|
| 639 |
+
|
| 640 |
+
def dumps(
    value: PlistEncodable,
    sort_keys: bool = True,
    skipkeys: bool = False,
    use_builtin_types: Optional[bool] = None,
    pretty_print: bool = True,
) -> bytes:
    """Write a Python object to a string in plist format.

    Args:
        value: An object to write.
        sort_keys (bool): Whether keys of dictionaries should be sorted.
        skipkeys (bool): Whether to silently skip non-string dictionary
            keys.
        use_builtin_types (bool): If true, byte strings will be
            encoded in Base-64 and wrapped in a ``data`` tag; if
            false, they will be either stored as strings or an
            exception raised if they cannot be represented. Defaults
            to ``True`` if not present. Deprecated.
        pretty_print (bool): Whether to indent the output.

    Returns:
        string: A plist representation of the Python object.

    Raises:
        ``TypeError``
            if non-string dictionary keys are serialized
            and ``skipkeys`` is false.
        ``ValueError``
            if non-representable binary data is present
            and `use_builtin_types` is false.
    """
    # Serialize into an in-memory buffer via dump() and return its contents.
    buffer = BytesIO()
    dump(
        value,
        buffer,
        sort_keys=sort_keys,
        skipkeys=skipkeys,
        use_builtin_types=use_builtin_types,
        pretty_print=pretty_print,
    )
    return buffer.getvalue()
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/instancer/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (311 Bytes). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/interpolatable.py
ADDED
|
@@ -0,0 +1,1148 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tool to find wrong contour order between different masters, and
|
| 3 |
+
other interpolatability (or lack thereof) issues.
|
| 4 |
+
|
| 5 |
+
Call as:
|
| 6 |
+
$ fonttools varLib.interpolatable font1 font2 ...
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
from .interpolatableHelpers import *
|
| 10 |
+
from .interpolatableTestContourOrder import test_contour_order
|
| 11 |
+
from .interpolatableTestStartingPoint import test_starting_point
|
| 12 |
+
from fontTools.pens.recordingPen import (
|
| 13 |
+
RecordingPen,
|
| 14 |
+
DecomposingRecordingPen,
|
| 15 |
+
lerpRecordings,
|
| 16 |
+
)
|
| 17 |
+
from fontTools.pens.transformPen import TransformPen
|
| 18 |
+
from fontTools.pens.statisticsPen import StatisticsPen, StatisticsControlPen
|
| 19 |
+
from fontTools.pens.momentsPen import OpenContourError
|
| 20 |
+
from fontTools.varLib.models import piecewiseLinearMap, normalizeLocation
|
| 21 |
+
from fontTools.misc.fixedTools import floatToFixedToStr
|
| 22 |
+
from fontTools.misc.transform import Transform
|
| 23 |
+
from collections import defaultdict
|
| 24 |
+
from types import SimpleNamespace
|
| 25 |
+
from functools import wraps
|
| 26 |
+
from pprint import pformat
|
| 27 |
+
from math import sqrt, atan2, pi
|
| 28 |
+
import logging
|
| 29 |
+
import os
|
| 30 |
+
|
| 31 |
+
log = logging.getLogger("fontTools.varLib.interpolatable")
|
| 32 |
+
|
| 33 |
+
DEFAULT_TOLERANCE = 0.95
|
| 34 |
+
DEFAULT_KINKINESS = 0.5
|
| 35 |
+
DEFAULT_KINKINESS_LENGTH = 0.002 # ratio of UPEM
|
| 36 |
+
DEFAULT_UPEM = 1000
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class Glyph:
    # Names of the per-contour, index-aligned lists this class maintains.
    # Each list has one entry per contour; entries may be None for contours
    # that could not be analyzed (open contours, components).
    ITEMS = (
        "recordings",
        "greenStats",
        "controlStats",
        "greenVectors",
        "controlVectors",
        "nodeTypes",
        "isomorphisms",
        "points",
        "openContours",
    )

    def __init__(self, glyphname, glyphset):
        """Record and analyze the named glyph from *glyphset*."""
        self.name = glyphname
        for item in self.ITEMS:
            setattr(self, item, [])
        self._populate(glyphset)

    def _fill_in(self, ix):
        # Pad every per-contour list that has exactly `ix` entries with a
        # None placeholder, so all ITEMS lists stay index-aligned when a
        # contour is skipped partway through analysis.
        for item in self.ITEMS:
            if len(getattr(self, item)) == ix:
                getattr(self, item).append(None)

    def _populate(self, glyphset):
        """Draw the glyph and compute per-contour analysis data."""
        glyph = glyphset[self.name]
        self.doesnt_exist = glyph is None
        if self.doesnt_exist:
            return

        # Record each contour (or component) into its own RecordingPen.
        perContourPen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
        try:
            glyph.draw(perContourPen, outputImpliedClosingLine=True)
        except TypeError:
            # Older draw() implementations don't accept the keyword.
            glyph.draw(perContourPen)
        self.recordings = perContourPen.value
        del perContourPen

        for ix, contour in enumerate(self.recordings):
            # The sequence of pen operations ("moveTo", "lineTo", ...) is
            # used later for structural compatibility checks.
            nodeTypes = [op for op, arg in contour.value]
            self.nodeTypes.append(nodeTypes)

            greenStats = StatisticsPen(glyphset=glyphset)
            controlStats = StatisticsControlPen(glyphset=glyphset)
            try:
                contour.replay(greenStats)
                contour.replay(controlStats)
                self.openContours.append(False)
            except OpenContourError as e:
                # Open contours cannot be analyzed; mark and keep lists aligned.
                self.openContours.append(True)
                self._fill_in(ix)
                continue
            self.greenStats.append(greenStats)
            self.controlStats.append(controlStats)
            self.greenVectors.append(contour_vector_from_stats(greenStats))
            self.controlVectors.append(contour_vector_from_stats(controlStats))

            # Check starting point
            if nodeTypes[0] == "addComponent":
                # Components have no point data of their own.
                self._fill_in(ix)
                continue

            assert nodeTypes[0] == "moveTo"
            assert nodeTypes[-1] in ("closePath", "endPath")
            points = SimpleRecordingPointPen()
            converter = SegmentToPointPen(points, False)
            contour.replay(converter)
            # points.value is a list of pt,bool where bool is true if on-curve and false if off-curve;
            # now check all rotations and mirror-rotations of the contour and build list of isomorphic
            # possible starting points.
            self.points.append(points.value)

            isomorphisms = []
            self.isomorphisms.append(isomorphisms)

            # Add rotations
            add_isomorphisms(points.value, isomorphisms, False)
            # Add mirrored rotations
            add_isomorphisms(points.value, isomorphisms, True)

    def draw(self, pen, countor_idx=None):
        """Replay all recorded contours (or just one, by index) into *pen*."""
        if countor_idx is None:
            for contour in self.recordings:
                contour.draw(pen)
        else:
            self.recordings[countor_idx].draw(pen)
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
def test_gen(
|
| 128 |
+
glyphsets,
|
| 129 |
+
glyphs=None,
|
| 130 |
+
names=None,
|
| 131 |
+
ignore_missing=False,
|
| 132 |
+
*,
|
| 133 |
+
locations=None,
|
| 134 |
+
tolerance=DEFAULT_TOLERANCE,
|
| 135 |
+
kinkiness=DEFAULT_KINKINESS,
|
| 136 |
+
upem=DEFAULT_UPEM,
|
| 137 |
+
show_all=False,
|
| 138 |
+
discrete_axes=[],
|
| 139 |
+
):
|
| 140 |
+
if tolerance >= 10:
|
| 141 |
+
tolerance *= 0.01
|
| 142 |
+
assert 0 <= tolerance <= 1
|
| 143 |
+
if kinkiness >= 10:
|
| 144 |
+
kinkiness *= 0.01
|
| 145 |
+
assert 0 <= kinkiness
|
| 146 |
+
|
| 147 |
+
names = names or [repr(g) for g in glyphsets]
|
| 148 |
+
|
| 149 |
+
if glyphs is None:
|
| 150 |
+
# `glyphs = glyphsets[0].keys()` is faster, certainly, but doesn't allow for sparse TTFs/OTFs given out of order
|
| 151 |
+
# ... risks the sparse master being the first one, and only processing a subset of the glyphs
|
| 152 |
+
glyphs = {g for glyphset in glyphsets for g in glyphset.keys()}
|
| 153 |
+
|
| 154 |
+
parents, order = find_parents_and_order(
|
| 155 |
+
glyphsets, locations, discrete_axes=discrete_axes
|
| 156 |
+
)
|
| 157 |
+
|
| 158 |
+
def grand_parent(i, glyphname):
|
| 159 |
+
if i is None:
|
| 160 |
+
return None
|
| 161 |
+
i = parents[i]
|
| 162 |
+
if i is None:
|
| 163 |
+
return None
|
| 164 |
+
while parents[i] is not None and glyphsets[i][glyphname] is None:
|
| 165 |
+
i = parents[i]
|
| 166 |
+
return i
|
| 167 |
+
|
| 168 |
+
for glyph_name in glyphs:
|
| 169 |
+
log.info("Testing glyph %s", glyph_name)
|
| 170 |
+
allGlyphs = [Glyph(glyph_name, glyphset) for glyphset in glyphsets]
|
| 171 |
+
if len([1 for glyph in allGlyphs if glyph is not None]) <= 1:
|
| 172 |
+
continue
|
| 173 |
+
for master_idx, (glyph, glyphset, name) in enumerate(
|
| 174 |
+
zip(allGlyphs, glyphsets, names)
|
| 175 |
+
):
|
| 176 |
+
if glyph.doesnt_exist:
|
| 177 |
+
if not ignore_missing:
|
| 178 |
+
yield (
|
| 179 |
+
glyph_name,
|
| 180 |
+
{
|
| 181 |
+
"type": InterpolatableProblem.MISSING,
|
| 182 |
+
"master": name,
|
| 183 |
+
"master_idx": master_idx,
|
| 184 |
+
},
|
| 185 |
+
)
|
| 186 |
+
continue
|
| 187 |
+
|
| 188 |
+
has_open = False
|
| 189 |
+
for ix, open in enumerate(glyph.openContours):
|
| 190 |
+
if not open:
|
| 191 |
+
continue
|
| 192 |
+
has_open = True
|
| 193 |
+
yield (
|
| 194 |
+
glyph_name,
|
| 195 |
+
{
|
| 196 |
+
"type": InterpolatableProblem.OPEN_PATH,
|
| 197 |
+
"master": name,
|
| 198 |
+
"master_idx": master_idx,
|
| 199 |
+
"contour": ix,
|
| 200 |
+
},
|
| 201 |
+
)
|
| 202 |
+
if has_open:
|
| 203 |
+
continue
|
| 204 |
+
|
| 205 |
+
matchings = [None] * len(glyphsets)
|
| 206 |
+
|
| 207 |
+
for m1idx in order:
|
| 208 |
+
glyph1 = allGlyphs[m1idx]
|
| 209 |
+
if glyph1 is None or not glyph1.nodeTypes:
|
| 210 |
+
continue
|
| 211 |
+
m0idx = grand_parent(m1idx, glyph_name)
|
| 212 |
+
if m0idx is None:
|
| 213 |
+
continue
|
| 214 |
+
glyph0 = allGlyphs[m0idx]
|
| 215 |
+
if glyph0 is None or not glyph0.nodeTypes:
|
| 216 |
+
continue
|
| 217 |
+
|
| 218 |
+
#
|
| 219 |
+
# Basic compatibility checks
|
| 220 |
+
#
|
| 221 |
+
|
| 222 |
+
m1 = glyph0.nodeTypes
|
| 223 |
+
m0 = glyph1.nodeTypes
|
| 224 |
+
if len(m0) != len(m1):
|
| 225 |
+
yield (
|
| 226 |
+
glyph_name,
|
| 227 |
+
{
|
| 228 |
+
"type": InterpolatableProblem.PATH_COUNT,
|
| 229 |
+
"master_1": names[m0idx],
|
| 230 |
+
"master_2": names[m1idx],
|
| 231 |
+
"master_1_idx": m0idx,
|
| 232 |
+
"master_2_idx": m1idx,
|
| 233 |
+
"value_1": len(m0),
|
| 234 |
+
"value_2": len(m1),
|
| 235 |
+
},
|
| 236 |
+
)
|
| 237 |
+
continue
|
| 238 |
+
|
| 239 |
+
if m0 != m1:
|
| 240 |
+
for pathIx, (nodes1, nodes2) in enumerate(zip(m0, m1)):
|
| 241 |
+
if nodes1 == nodes2:
|
| 242 |
+
continue
|
| 243 |
+
if len(nodes1) != len(nodes2):
|
| 244 |
+
yield (
|
| 245 |
+
glyph_name,
|
| 246 |
+
{
|
| 247 |
+
"type": InterpolatableProblem.NODE_COUNT,
|
| 248 |
+
"path": pathIx,
|
| 249 |
+
"master_1": names[m0idx],
|
| 250 |
+
"master_2": names[m1idx],
|
| 251 |
+
"master_1_idx": m0idx,
|
| 252 |
+
"master_2_idx": m1idx,
|
| 253 |
+
"value_1": len(nodes1),
|
| 254 |
+
"value_2": len(nodes2),
|
| 255 |
+
},
|
| 256 |
+
)
|
| 257 |
+
continue
|
| 258 |
+
for nodeIx, (n1, n2) in enumerate(zip(nodes1, nodes2)):
|
| 259 |
+
if n1 != n2:
|
| 260 |
+
yield (
|
| 261 |
+
glyph_name,
|
| 262 |
+
{
|
| 263 |
+
"type": InterpolatableProblem.NODE_INCOMPATIBILITY,
|
| 264 |
+
"path": pathIx,
|
| 265 |
+
"node": nodeIx,
|
| 266 |
+
"master_1": names[m0idx],
|
| 267 |
+
"master_2": names[m1idx],
|
| 268 |
+
"master_1_idx": m0idx,
|
| 269 |
+
"master_2_idx": m1idx,
|
| 270 |
+
"value_1": n1,
|
| 271 |
+
"value_2": n2,
|
| 272 |
+
},
|
| 273 |
+
)
|
| 274 |
+
continue
|
| 275 |
+
|
| 276 |
+
#
|
| 277 |
+
# InterpolatableProblem.CONTOUR_ORDER check
|
| 278 |
+
#
|
| 279 |
+
|
| 280 |
+
this_tolerance, matching = test_contour_order(glyph0, glyph1)
|
| 281 |
+
if this_tolerance < tolerance:
|
| 282 |
+
yield (
|
| 283 |
+
glyph_name,
|
| 284 |
+
{
|
| 285 |
+
"type": InterpolatableProblem.CONTOUR_ORDER,
|
| 286 |
+
"master_1": names[m0idx],
|
| 287 |
+
"master_2": names[m1idx],
|
| 288 |
+
"master_1_idx": m0idx,
|
| 289 |
+
"master_2_idx": m1idx,
|
| 290 |
+
"value_1": list(range(len(matching))),
|
| 291 |
+
"value_2": matching,
|
| 292 |
+
"tolerance": this_tolerance,
|
| 293 |
+
},
|
| 294 |
+
)
|
| 295 |
+
matchings[m1idx] = matching
|
| 296 |
+
|
| 297 |
+
#
|
| 298 |
+
# wrong-start-point / weight check
|
| 299 |
+
#
|
| 300 |
+
|
| 301 |
+
m0Isomorphisms = glyph0.isomorphisms
|
| 302 |
+
m1Isomorphisms = glyph1.isomorphisms
|
| 303 |
+
m0Vectors = glyph0.greenVectors
|
| 304 |
+
m1Vectors = glyph1.greenVectors
|
| 305 |
+
recording0 = glyph0.recordings
|
| 306 |
+
recording1 = glyph1.recordings
|
| 307 |
+
|
| 308 |
+
# If contour-order is wrong, adjust it
|
| 309 |
+
matching = matchings[m1idx]
|
| 310 |
+
if (
|
| 311 |
+
matching is not None and m1Isomorphisms
|
| 312 |
+
): # m1 is empty for composite glyphs
|
| 313 |
+
m1Isomorphisms = [m1Isomorphisms[i] for i in matching]
|
| 314 |
+
m1Vectors = [m1Vectors[i] for i in matching]
|
| 315 |
+
recording1 = [recording1[i] for i in matching]
|
| 316 |
+
|
| 317 |
+
midRecording = []
|
| 318 |
+
for c0, c1 in zip(recording0, recording1):
|
| 319 |
+
try:
|
| 320 |
+
r = RecordingPen()
|
| 321 |
+
r.value = list(lerpRecordings(c0.value, c1.value))
|
| 322 |
+
midRecording.append(r)
|
| 323 |
+
except ValueError:
|
| 324 |
+
# Mismatch because of the reordering above
|
| 325 |
+
midRecording.append(None)
|
| 326 |
+
|
| 327 |
+
for ix, (contour0, contour1) in enumerate(
|
| 328 |
+
zip(m0Isomorphisms, m1Isomorphisms)
|
| 329 |
+
):
|
| 330 |
+
if (
|
| 331 |
+
contour0 is None
|
| 332 |
+
or contour1 is None
|
| 333 |
+
or len(contour0) == 0
|
| 334 |
+
or len(contour0) != len(contour1)
|
| 335 |
+
):
|
| 336 |
+
# We already reported this; or nothing to do; or not compatible
|
| 337 |
+
# after reordering above.
|
| 338 |
+
continue
|
| 339 |
+
|
| 340 |
+
this_tolerance, proposed_point, reverse = test_starting_point(
|
| 341 |
+
glyph0, glyph1, ix, tolerance, matching
|
| 342 |
+
)
|
| 343 |
+
|
| 344 |
+
if this_tolerance < tolerance:
|
| 345 |
+
yield (
|
| 346 |
+
glyph_name,
|
| 347 |
+
{
|
| 348 |
+
"type": InterpolatableProblem.WRONG_START_POINT,
|
| 349 |
+
"contour": ix,
|
| 350 |
+
"master_1": names[m0idx],
|
| 351 |
+
"master_2": names[m1idx],
|
| 352 |
+
"master_1_idx": m0idx,
|
| 353 |
+
"master_2_idx": m1idx,
|
| 354 |
+
"value_1": 0,
|
| 355 |
+
"value_2": proposed_point,
|
| 356 |
+
"reversed": reverse,
|
| 357 |
+
"tolerance": this_tolerance,
|
| 358 |
+
},
|
| 359 |
+
)
|
| 360 |
+
|
| 361 |
+
# Weight check.
|
| 362 |
+
#
|
| 363 |
+
# If contour could be mid-interpolated, and the two
|
| 364 |
+
# contours have the same area sign, proceeed.
|
| 365 |
+
#
|
| 366 |
+
# The sign difference can happen if it's a weirdo
|
| 367 |
+
# self-intersecting contour; ignore it.
|
| 368 |
+
contour = midRecording[ix]
|
| 369 |
+
|
| 370 |
+
if contour and (m0Vectors[ix][0] < 0) == (m1Vectors[ix][0] < 0):
|
| 371 |
+
midStats = StatisticsPen(glyphset=None)
|
| 372 |
+
contour.replay(midStats)
|
| 373 |
+
|
| 374 |
+
midVector = contour_vector_from_stats(midStats)
|
| 375 |
+
|
| 376 |
+
m0Vec = m0Vectors[ix]
|
| 377 |
+
m1Vec = m1Vectors[ix]
|
| 378 |
+
size0 = m0Vec[0] * m0Vec[0]
|
| 379 |
+
size1 = m1Vec[0] * m1Vec[0]
|
| 380 |
+
midSize = midVector[0] * midVector[0]
|
| 381 |
+
|
| 382 |
+
for overweight, problem_type in enumerate(
|
| 383 |
+
(
|
| 384 |
+
InterpolatableProblem.UNDERWEIGHT,
|
| 385 |
+
InterpolatableProblem.OVERWEIGHT,
|
| 386 |
+
)
|
| 387 |
+
):
|
| 388 |
+
if overweight:
|
| 389 |
+
expectedSize = max(size0, size1)
|
| 390 |
+
continue
|
| 391 |
+
else:
|
| 392 |
+
expectedSize = sqrt(size0 * size1)
|
| 393 |
+
|
| 394 |
+
log.debug(
|
| 395 |
+
"%s: actual size %g; threshold size %g, master sizes: %g, %g",
|
| 396 |
+
problem_type,
|
| 397 |
+
midSize,
|
| 398 |
+
expectedSize,
|
| 399 |
+
size0,
|
| 400 |
+
size1,
|
| 401 |
+
)
|
| 402 |
+
|
| 403 |
+
if (
|
| 404 |
+
not overweight and expectedSize * tolerance > midSize + 1e-5
|
| 405 |
+
) or (overweight and 1e-5 + expectedSize / tolerance < midSize):
|
| 406 |
+
try:
|
| 407 |
+
if overweight:
|
| 408 |
+
this_tolerance = expectedSize / midSize
|
| 409 |
+
else:
|
| 410 |
+
this_tolerance = midSize / expectedSize
|
| 411 |
+
except ZeroDivisionError:
|
| 412 |
+
this_tolerance = 0
|
| 413 |
+
log.debug("tolerance %g", this_tolerance)
|
| 414 |
+
yield (
|
| 415 |
+
glyph_name,
|
| 416 |
+
{
|
| 417 |
+
"type": problem_type,
|
| 418 |
+
"contour": ix,
|
| 419 |
+
"master_1": names[m0idx],
|
| 420 |
+
"master_2": names[m1idx],
|
| 421 |
+
"master_1_idx": m0idx,
|
| 422 |
+
"master_2_idx": m1idx,
|
| 423 |
+
"tolerance": this_tolerance,
|
| 424 |
+
},
|
| 425 |
+
)
|
| 426 |
+
|
| 427 |
+
#
|
| 428 |
+
# "kink" detector
|
| 429 |
+
#
|
| 430 |
+
m0 = glyph0.points
|
| 431 |
+
m1 = glyph1.points
|
| 432 |
+
|
| 433 |
+
# If contour-order is wrong, adjust it
|
| 434 |
+
if matchings[m1idx] is not None and m1: # m1 is empty for composite glyphs
|
| 435 |
+
m1 = [m1[i] for i in matchings[m1idx]]
|
| 436 |
+
|
| 437 |
+
t = 0.1 # ~sin(radian(6)) for tolerance 0.95
|
| 438 |
+
deviation_threshold = (
|
| 439 |
+
upem * DEFAULT_KINKINESS_LENGTH * DEFAULT_KINKINESS / kinkiness
|
| 440 |
+
)
|
| 441 |
+
|
| 442 |
+
for ix, (contour0, contour1) in enumerate(zip(m0, m1)):
|
| 443 |
+
if (
|
| 444 |
+
contour0 is None
|
| 445 |
+
or contour1 is None
|
| 446 |
+
or len(contour0) == 0
|
| 447 |
+
or len(contour0) != len(contour1)
|
| 448 |
+
):
|
| 449 |
+
# We already reported this; or nothing to do; or not compatible
|
| 450 |
+
# after reordering above.
|
| 451 |
+
continue
|
| 452 |
+
|
| 453 |
+
# Walk the contour, keeping track of three consecutive points, with
|
| 454 |
+
# middle one being an on-curve. If the three are co-linear then
|
| 455 |
+
# check for kinky-ness.
|
| 456 |
+
for i in range(len(contour0)):
|
| 457 |
+
pt0 = contour0[i]
|
| 458 |
+
pt1 = contour1[i]
|
| 459 |
+
if not pt0[1] or not pt1[1]:
|
| 460 |
+
# Skip off-curves
|
| 461 |
+
continue
|
| 462 |
+
pt0_prev = contour0[i - 1]
|
| 463 |
+
pt1_prev = contour1[i - 1]
|
| 464 |
+
pt0_next = contour0[(i + 1) % len(contour0)]
|
| 465 |
+
pt1_next = contour1[(i + 1) % len(contour1)]
|
| 466 |
+
|
| 467 |
+
if pt0_prev[1] and pt1_prev[1]:
|
| 468 |
+
# At least one off-curve is required
|
| 469 |
+
continue
|
| 470 |
+
if pt0_prev[1] and pt1_prev[1]:
|
| 471 |
+
# At least one off-curve is required
|
| 472 |
+
continue
|
| 473 |
+
|
| 474 |
+
pt0 = complex(*pt0[0])
|
| 475 |
+
pt1 = complex(*pt1[0])
|
| 476 |
+
pt0_prev = complex(*pt0_prev[0])
|
| 477 |
+
pt1_prev = complex(*pt1_prev[0])
|
| 478 |
+
pt0_next = complex(*pt0_next[0])
|
| 479 |
+
pt1_next = complex(*pt1_next[0])
|
| 480 |
+
|
| 481 |
+
# We have three consecutive points. Check whether
|
| 482 |
+
# they are colinear.
|
| 483 |
+
d0_prev = pt0 - pt0_prev
|
| 484 |
+
d0_next = pt0_next - pt0
|
| 485 |
+
d1_prev = pt1 - pt1_prev
|
| 486 |
+
d1_next = pt1_next - pt1
|
| 487 |
+
|
| 488 |
+
sin0 = d0_prev.real * d0_next.imag - d0_prev.imag * d0_next.real
|
| 489 |
+
sin1 = d1_prev.real * d1_next.imag - d1_prev.imag * d1_next.real
|
| 490 |
+
try:
|
| 491 |
+
sin0 /= abs(d0_prev) * abs(d0_next)
|
| 492 |
+
sin1 /= abs(d1_prev) * abs(d1_next)
|
| 493 |
+
except ZeroDivisionError:
|
| 494 |
+
continue
|
| 495 |
+
|
| 496 |
+
if abs(sin0) > t or abs(sin1) > t:
|
| 497 |
+
# Not colinear / not smooth.
|
| 498 |
+
continue
|
| 499 |
+
|
| 500 |
+
# Check the mid-point is actually, well, in the middle.
|
| 501 |
+
dot0 = d0_prev.real * d0_next.real + d0_prev.imag * d0_next.imag
|
| 502 |
+
dot1 = d1_prev.real * d1_next.real + d1_prev.imag * d1_next.imag
|
| 503 |
+
if dot0 < 0 or dot1 < 0:
|
| 504 |
+
# Sharp corner.
|
| 505 |
+
continue
|
| 506 |
+
|
| 507 |
+
# Fine, if handle ratios are similar...
|
| 508 |
+
r0 = abs(d0_prev) / (abs(d0_prev) + abs(d0_next))
|
| 509 |
+
r1 = abs(d1_prev) / (abs(d1_prev) + abs(d1_next))
|
| 510 |
+
r_diff = abs(r0 - r1)
|
| 511 |
+
if abs(r_diff) < t:
|
| 512 |
+
# Smooth enough.
|
| 513 |
+
continue
|
| 514 |
+
|
| 515 |
+
mid = (pt0 + pt1) / 2
|
| 516 |
+
mid_prev = (pt0_prev + pt1_prev) / 2
|
| 517 |
+
mid_next = (pt0_next + pt1_next) / 2
|
| 518 |
+
|
| 519 |
+
mid_d0 = mid - mid_prev
|
| 520 |
+
mid_d1 = mid_next - mid
|
| 521 |
+
|
| 522 |
+
sin_mid = mid_d0.real * mid_d1.imag - mid_d0.imag * mid_d1.real
|
| 523 |
+
try:
|
| 524 |
+
sin_mid /= abs(mid_d0) * abs(mid_d1)
|
| 525 |
+
except ZeroDivisionError:
|
| 526 |
+
continue
|
| 527 |
+
|
| 528 |
+
# ...or if the angles are similar.
|
| 529 |
+
if abs(sin_mid) * (tolerance * kinkiness) <= t:
|
| 530 |
+
# Smooth enough.
|
| 531 |
+
continue
|
| 532 |
+
|
| 533 |
+
# How visible is the kink?
|
| 534 |
+
|
| 535 |
+
cross = sin_mid * abs(mid_d0) * abs(mid_d1)
|
| 536 |
+
arc_len = abs(mid_d0 + mid_d1)
|
| 537 |
+
deviation = abs(cross / arc_len)
|
| 538 |
+
if deviation < deviation_threshold:
|
| 539 |
+
continue
|
| 540 |
+
deviation_ratio = deviation / arc_len
|
| 541 |
+
if deviation_ratio > t:
|
| 542 |
+
continue
|
| 543 |
+
|
| 544 |
+
this_tolerance = t / (abs(sin_mid) * kinkiness)
|
| 545 |
+
|
| 546 |
+
log.debug(
|
| 547 |
+
"kink: deviation %g; deviation_ratio %g; sin_mid %g; r_diff %g",
|
| 548 |
+
deviation,
|
| 549 |
+
deviation_ratio,
|
| 550 |
+
sin_mid,
|
| 551 |
+
r_diff,
|
| 552 |
+
)
|
| 553 |
+
log.debug("tolerance %g", this_tolerance)
|
| 554 |
+
yield (
|
| 555 |
+
glyph_name,
|
| 556 |
+
{
|
| 557 |
+
"type": InterpolatableProblem.KINK,
|
| 558 |
+
"contour": ix,
|
| 559 |
+
"master_1": names[m0idx],
|
| 560 |
+
"master_2": names[m1idx],
|
| 561 |
+
"master_1_idx": m0idx,
|
| 562 |
+
"master_2_idx": m1idx,
|
| 563 |
+
"value": i,
|
| 564 |
+
"tolerance": this_tolerance,
|
| 565 |
+
},
|
| 566 |
+
)
|
| 567 |
+
|
| 568 |
+
#
|
| 569 |
+
# --show-all
|
| 570 |
+
#
|
| 571 |
+
|
| 572 |
+
if show_all:
|
| 573 |
+
yield (
|
| 574 |
+
glyph_name,
|
| 575 |
+
{
|
| 576 |
+
"type": InterpolatableProblem.NOTHING,
|
| 577 |
+
"master_1": names[m0idx],
|
| 578 |
+
"master_2": names[m1idx],
|
| 579 |
+
"master_1_idx": m0idx,
|
| 580 |
+
"master_2_idx": m1idx,
|
| 581 |
+
},
|
| 582 |
+
)
|
| 583 |
+
|
| 584 |
+
|
| 585 |
+
@wraps(test_gen)
def test(*args, **kwargs):
    """Eagerly run test_gen() and group its output by glyph.

    Returns a dict mapping glyph name -> list of problem dicts, in the
    order test_gen() yielded them.
    """
    collected = defaultdict(list)
    for name, issue in test_gen(*args, **kwargs):
        collected[name].append(issue)
    return collected
|
| 591 |
+
|
| 592 |
+
|
| 593 |
+
def recursivelyAddGlyph(glyphname, glyphset, ttGlyphSet, glyf):
    """Copy *glyphname* from *ttGlyphSet* into *glyphset*, then recurse
    into any components found on the corresponding `glyf` table entry.

    Glyphs already present in *glyphset* are skipped, which also makes
    the recursion terminate on repeated/shared components.
    """
    if glyphname in glyphset:
        return
    glyphset[glyphname] = ttGlyphSet[glyphname]

    # Composite glyphs carry a `components` attribute; simple ones don't.
    for comp in getattr(glyf[glyphname], "components", []):
        recursivelyAddGlyph(comp.glyphName, glyphset, ttGlyphSet, glyf)
|
| 600 |
+
|
| 601 |
+
|
| 602 |
+
def ensure_parent_dir(path):
    """Create the directory that will contain *path*, if needed.

    Always returns *path* unchanged so the call can be nested inside
    open()/constructor arguments. Plain filenames (no directory part)
    require no work.
    """
    parent = os.path.dirname(path)
    if not parent:
        return path
    os.makedirs(parent, exist_ok=True)
    return path
|
| 607 |
+
|
| 608 |
+
|
| 609 |
+
def main(args=None):
    """Test for interpolatability issues between fonts"""
    # NOTE: the docstring above doubles as the CLI description
    # (description=main.__doc__ below) — do not edit it casually.
    import argparse
    import sys

    parser = argparse.ArgumentParser(
        "fonttools varLib.interpolatable",
        description=main.__doc__,
    )
    parser.add_argument(
        "--glyphs",
        action="store",
        help="Space-separate name of glyphs to check",
    )
    parser.add_argument(
        "--show-all",
        action="store_true",
        help="Show all glyph pairs, even if no problems are found",
    )
    parser.add_argument(
        "--tolerance",
        action="store",
        type=float,
        help="Error tolerance. Between 0 and 1. Default %s" % DEFAULT_TOLERANCE,
    )
    parser.add_argument(
        "--kinkiness",
        action="store",
        type=float,
        help="How aggressively report kinks. Default %s" % DEFAULT_KINKINESS,
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output report in JSON format",
    )
    parser.add_argument(
        "--pdf",
        action="store",
        help="Output report in PDF format",
    )
    parser.add_argument(
        "--ps",
        action="store",
        help="Output report in PostScript format",
    )
    parser.add_argument(
        "--html",
        action="store",
        help="Output report in HTML format",
    )
    parser.add_argument(
        "--quiet",
        action="store_true",
        help="Only exit with code 1 or 0, no output",
    )
    parser.add_argument(
        "--output",
        action="store",
        help="Output file for the problem report; Default: stdout",
    )
    parser.add_argument(
        "--ignore-missing",
        action="store_true",
        help="Will not report glyphs missing from sparse masters as errors",
    )
    parser.add_argument(
        "inputs",
        metavar="FILE",
        type=str,
        nargs="+",
        help="Input a single variable font / DesignSpace / Glyphs file, or multiple TTF/UFO files",
    )
    parser.add_argument(
        "--name",
        metavar="NAME",
        type=str,
        action="append",
        help="Name of the master to use in the report. If not provided, all are used.",
    )
    parser.add_argument("-v", "--verbose", action="store_true", help="Run verbosely.")
    parser.add_argument("--debug", action="store_true", help="Run with debug output.")

    args = parser.parse_args(args)

    from fontTools import configLogger

    configLogger(level=("INFO" if args.verbose else "ERROR"))
    if args.debug:
        configLogger(level="DEBUG")

    glyphs = args.glyphs.split() if args.glyphs else None

    from os.path import basename

    fonts = []
    names = []
    locations = []
    discrete_axes = set()
    upem = DEFAULT_UPEM

    # Keep the raw inputs for titles and error reporting; args.inputs is
    # mutated below depending on the input kind.
    original_args_inputs = tuple(args.inputs)

    if len(args.inputs) == 1:
        # A single input may be a container of masters (designspace,
        # Glyphs file, or a variable TTF) rather than one master.
        designspace = None
        if args.inputs[0].endswith(".designspace"):
            from fontTools.designspaceLib import DesignSpaceDocument

            designspace = DesignSpaceDocument.fromfile(args.inputs[0])
            args.inputs = [master.path for master in designspace.sources]
            locations = [master.location for master in designspace.sources]
            # Discrete axes have no minimum/maximum — only a set of values.
            discrete_axes = {
                a.name for a in designspace.axes if not hasattr(a, "minimum")
            }
            axis_triples = {
                a.name: (a.minimum, a.default, a.maximum)
                for a in designspace.axes
                if a.name not in discrete_axes
            }
            axis_mappings = {a.name: a.map for a in designspace.axes}
            # Map the triples through each axis's user->design mapping.
            axis_triples = {
                k: tuple(piecewiseLinearMap(v, dict(axis_mappings[k])) for v in vv)
                for k, vv in axis_triples.items()
            }

        elif args.inputs[0].endswith((".glyphs", ".glyphspackage")):
            from glyphsLib import GSFont, to_designspace

            gsfont = GSFont(args.inputs[0])
            upem = gsfont.upm
            designspace = to_designspace(gsfont)
            fonts = [source.font for source in designspace.sources]
            names = ["%s-%s" % (f.info.familyName, f.info.styleName) for f in fonts]
            args.inputs = []
            locations = [master.location for master in designspace.sources]
            axis_triples = {
                a.name: (a.minimum, a.default, a.maximum) for a in designspace.axes
            }
            axis_mappings = {a.name: a.map for a in designspace.axes}
            axis_triples = {
                k: tuple(piecewiseLinearMap(v, dict(axis_mappings[k])) for v in vv)
                for k, vv in axis_triples.items()
            }

        elif args.inputs[0].endswith(".ttf"):
            from fontTools.ttLib import TTFont

            font = TTFont(args.inputs[0])
            upem = font["head"].unitsPerEm
            if "gvar" in font:
                # Is variable font

                # Build normalized(-1/0/+1) -> user-space mapping per axis,
                # adjusted by avar below, used only for display names.
                fvar = font["fvar"]
                axisMapping = {}
                for axis in fvar.axes:
                    axisMapping[axis.axisTag] = {
                        -1: axis.minValue,
                        0: axis.defaultValue,
                        1: axis.maxValue,
                    }
                normalized = False
                if "avar" in font:
                    avar = font["avar"]
                    if getattr(avar.table, "VarStore", None):
                        # avar2: cannot invert; report normalized coords.
                        axisMapping = {tag: {-1: -1, 0: 0, 1: 1} for tag in axisMapping}
                        normalized = True
                    else:
                        for axisTag, segments in avar.segments.items():
                            fvarMapping = axisMapping[axisTag].copy()
                            for location, value in segments.items():
                                axisMapping[axisTag][value] = piecewiseLinearMap(
                                    location, fvarMapping
                                )

                gvar = font["gvar"]
                glyf = font["glyf"]
                # Gather all glyphs at their "master" locations
                ttGlyphSets = {}
                glyphsets = defaultdict(dict)

                if glyphs is None:
                    glyphs = sorted(gvar.variations.keys())
                for glyphname in glyphs:
                    for var in gvar.variations[glyphname]:
                        locDict = {}
                        loc = []
                        for tag, val in sorted(var.axes.items()):
                            # val is (start, peak, end); the peak is the
                            # "master" location for this variation.
                            locDict[tag] = val[1]
                            loc.append((tag, val[1]))

                        locTuple = tuple(loc)
                        if locTuple not in ttGlyphSets:
                            ttGlyphSets[locTuple] = font.getGlyphSet(
                                location=locDict, normalized=True, recalcBounds=False
                            )

                        # Pull in the glyph and its component closure.
                        recursivelyAddGlyph(
                            glyphname, glyphsets[locTuple], ttGlyphSets[locTuple], glyf
                        )

                names = ["''"]
                fonts = [font.getGlyphSet()]
                locations = [{}]
                axis_triples = {a: (-1, 0, +1) for a in sorted(axisMapping.keys())}
                for locTuple in sorted(glyphsets.keys(), key=lambda v: (len(v), v)):
                    name = (
                        "'"
                        + " ".join(
                            "%s=%s"
                            % (
                                k,
                                floatToFixedToStr(
                                    piecewiseLinearMap(v, axisMapping[k]), 14
                                ),
                            )
                            for k, v in locTuple
                        )
                        + "'"
                    )
                    if normalized:
                        name += " (normalized)"
                    names.append(name)
                    fonts.append(glyphsets[locTuple])
                    locations.append(dict(locTuple))
                # Sparse gvar deltas legitimately omit glyphs.
                args.ignore_missing = True
                args.inputs = []

    if not locations:
        locations = [{} for _ in fonts]

    for filename in args.inputs:
        if filename.endswith(".ufo"):
            from fontTools.ufoLib import UFOReader

            font = UFOReader(filename)
            info = SimpleNamespace()
            font.readInfo(info)
            upem = info.unitsPerEm
            fonts.append(font)
        else:
            from fontTools.ttLib import TTFont

            font = TTFont(filename)
            upem = font["head"].unitsPerEm
            fonts.append(font)

        names.append(basename(filename).rsplit(".", 1)[0])

    glyphsets = []
    for font in fonts:
        if hasattr(font, "getGlyphSet"):
            glyphset = font.getGlyphSet()
        else:
            glyphset = font
        glyphsets.append({k: glyphset[k] for k in glyphset.keys()})

    if args.name:
        # Filter masters down to the explicitly requested names,
        # keeping names/glyphsets/locations in sync.
        accepted_names = set(args.name)
        glyphsets = [
            glyphset
            for name, glyphset in zip(names, glyphsets)
            if name in accepted_names
        ]
        locations = [
            location
            for name, location in zip(names, locations)
            if name in accepted_names
        ]
        names = [name for name in names if name in accepted_names]

    if not glyphs:
        glyphs = sorted(set([gn for glyphset in glyphsets for gn in glyphset.keys()]))

    # Pad each glyphset with None entries so every set covers all glyphs.
    glyphsSet = set(glyphs)
    for glyphset in glyphsets:
        glyphSetGlyphNames = set(glyphset.keys())
        diff = glyphsSet - glyphSetGlyphNames
        if diff:
            for gn in diff:
                glyphset[gn] = None

    # Normalize locations
    locations = [
        {
            **normalizeLocation(loc, axis_triples),
            **{k: v for k, v in loc.items() if k in discrete_axes},
        }
        for loc in locations
    ]
    tolerance = args.tolerance or DEFAULT_TOLERANCE
    kinkiness = args.kinkiness if args.kinkiness is not None else DEFAULT_KINKINESS

    try:
        log.info("Running on %d glyphsets", len(glyphsets))
        log.info("Locations: %s", pformat(locations))
        problems_gen = test_gen(
            glyphsets,
            glyphs=glyphs,
            names=names,
            locations=locations,
            upem=upem,
            ignore_missing=args.ignore_missing,
            tolerance=tolerance,
            kinkiness=kinkiness,
            show_all=args.show_all,
            discrete_axes=discrete_axes,
        )
        problems = defaultdict(list)

        f = (
            sys.stdout
            if args.output is None
            else open(ensure_parent_dir(args.output), "w")
        )

        if not args.quiet:
            if args.json:
                import json

                for glyphname, problem in problems_gen:
                    problems[glyphname].append(problem)

                print(json.dumps(problems), file=f)
            else:
                # Human-readable report, streamed as problems arrive.
                last_glyphname = None
                for glyphname, p in problems_gen:
                    problems[glyphname].append(p)

                    if glyphname != last_glyphname:
                        print(f"Glyph {glyphname} was not compatible:", file=f)
                        last_glyphname = glyphname
                        last_master_idxs = None

                    # Problems carry either a single master or a pair.
                    master_idxs = (
                        (p["master_idx"],)
                        if "master_idx" in p
                        else (p["master_1_idx"], p["master_2_idx"])
                    )
                    if master_idxs != last_master_idxs:
                        master_names = (
                            (p["master"],)
                            if "master" in p
                            else (p["master_1"], p["master_2"])
                        )
                        print(f"  Masters: %s:" % ", ".join(master_names), file=f)
                        last_master_idxs = master_idxs

                    if p["type"] == InterpolatableProblem.MISSING:
                        print(
                            "    Glyph was missing in master %s" % p["master"], file=f
                        )
                    elif p["type"] == InterpolatableProblem.OPEN_PATH:
                        print(
                            "    Glyph has an open path in master %s" % p["master"],
                            file=f,
                        )
                    elif p["type"] == InterpolatableProblem.PATH_COUNT:
                        print(
                            "    Path count differs: %i in %s, %i in %s"
                            % (
                                p["value_1"],
                                p["master_1"],
                                p["value_2"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == InterpolatableProblem.NODE_COUNT:
                        print(
                            "    Node count differs in path %i: %i in %s, %i in %s"
                            % (
                                p["path"],
                                p["value_1"],
                                p["master_1"],
                                p["value_2"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == InterpolatableProblem.NODE_INCOMPATIBILITY:
                        # NOTE(review): "%o" prints the node index in octal;
                        # looks like it should be "%i" — confirm upstream intent.
                        print(
                            "    Node %o incompatible in path %i: %s in %s, %s in %s"
                            % (
                                p["node"],
                                p["path"],
                                p["value_1"],
                                p["master_1"],
                                p["value_2"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == InterpolatableProblem.CONTOUR_ORDER:
                        print(
                            "    Contour order differs: %s in %s, %s in %s"
                            % (
                                p["value_1"],
                                p["master_1"],
                                p["value_2"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == InterpolatableProblem.WRONG_START_POINT:
                        print(
                            "    Contour %d start point differs: %s in %s, %s in %s; reversed: %s"
                            % (
                                p["contour"],
                                p["value_1"],
                                p["master_1"],
                                p["value_2"],
                                p["master_2"],
                                p["reversed"],
                            ),
                            file=f,
                        )
                    elif p["type"] == InterpolatableProblem.UNDERWEIGHT:
                        print(
                            "    Contour %d interpolation is underweight: %s, %s"
                            % (
                                p["contour"],
                                p["master_1"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == InterpolatableProblem.OVERWEIGHT:
                        print(
                            "    Contour %d interpolation is overweight: %s, %s"
                            % (
                                p["contour"],
                                p["master_1"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == InterpolatableProblem.KINK:
                        print(
                            "    Contour %d has a kink at %s: %s, %s"
                            % (
                                p["contour"],
                                p["value"],
                                p["master_1"],
                                p["master_2"],
                            ),
                            file=f,
                        )
                    elif p["type"] == InterpolatableProblem.NOTHING:
                        print(
                            "    Showing %s and %s"
                            % (
                                p["master_1"],
                                p["master_2"],
                            ),
                            file=f,
                        )
        else:
            # --quiet: still drain the generator to collect problems
            # for the exit status and the graphical reports.
            for glyphname, problem in problems_gen:
                problems[glyphname].append(problem)

        problems = sort_problems(problems)

        for p in "ps", "pdf":
            arg = getattr(args, p)
            if arg is None:
                continue
            log.info("Writing %s to %s", p.upper(), arg)
            from .interpolatablePlot import InterpolatablePS, InterpolatablePDF

            PlotterClass = InterpolatablePS if p == "ps" else InterpolatablePDF

            with PlotterClass(
                ensure_parent_dir(arg), glyphsets=glyphsets, names=names
            ) as doc:
                doc.add_title_page(
                    original_args_inputs, tolerance=tolerance, kinkiness=kinkiness
                )
                if problems:
                    doc.add_summary(problems)
                doc.add_problems(problems)
                if not problems and not args.quiet:
                    doc.draw_cupcake()
                if problems:
                    doc.add_index()
                    doc.add_table_of_contents()

        if args.html:
            log.info("Writing HTML to %s", args.html)
            from .interpolatablePlot import InterpolatableSVG

            svgs = []
            # Map SVG index -> glyph name, to emit <h1> headers below.
            glyph_starts = {}
            with InterpolatableSVG(svgs, glyphsets=glyphsets, names=names) as svg:
                svg.add_title_page(
                    original_args_inputs,
                    show_tolerance=False,
                    tolerance=tolerance,
                    kinkiness=kinkiness,
                )
                for glyph, glyph_problems in problems.items():
                    glyph_starts[len(svgs)] = glyph
                    svg.add_problems(
                        {glyph: glyph_problems},
                        show_tolerance=False,
                        show_page_number=False,
                    )
                if not problems and not args.quiet:
                    svg.draw_cupcake()

            import base64

            with open(ensure_parent_dir(args.html), "wb") as f:
                f.write(b"<!DOCTYPE html>\n")
                f.write(
                    b'<html><body align="center" style="font-family: sans-serif; text-color: #222">\n'
                )
                f.write(b"<title>fonttools varLib.interpolatable report</title>\n")
                for i, svg in enumerate(svgs):
                    if i in glyph_starts:
                        f.write(f"<h1>Glyph {glyph_starts[i]}</h1>\n".encode("utf-8"))
                    f.write("<img src='data:image/svg+xml;base64,".encode("utf-8"))
                    f.write(base64.b64encode(svg))
                    f.write(b"' />\n")
                    f.write(b"<hr>\n")
                f.write(b"</body></html>\n")

    except Exception as e:
        # Attach the input filenames to the exception for easier debugging.
        e.args += original_args_inputs
        log.error(e)
        raise

    if problems:
        return problems
|
| 1142 |
+
|
| 1143 |
+
|
| 1144 |
+
if __name__ == "__main__":
    import sys

    # Exit status 1 if any interpolatability problems were found, else 0.
    problems = main()
    sys.exit(int(bool(problems)))
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/interpolatableHelpers.py
ADDED
|
@@ -0,0 +1,396 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fontTools.ttLib.ttGlyphSet import LerpGlyphSet
|
| 2 |
+
from fontTools.pens.basePen import AbstractPen, BasePen, DecomposingPen
|
| 3 |
+
from fontTools.pens.pointPen import AbstractPointPen, SegmentToPointPen
|
| 4 |
+
from fontTools.pens.recordingPen import RecordingPen, DecomposingRecordingPen
|
| 5 |
+
from fontTools.misc.transform import Transform
|
| 6 |
+
from collections import defaultdict, deque
|
| 7 |
+
from math import sqrt, copysign, atan2, pi
|
| 8 |
+
from enum import Enum
|
| 9 |
+
import itertools
|
| 10 |
+
|
| 11 |
+
import logging
|
| 12 |
+
|
| 13 |
+
log = logging.getLogger("fontTools.varLib.interpolatable")
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class InterpolatableProblem:
    """Namespace of problem-type identifiers reported by the tests."""

    NOTHING = "nothing"  # no problem; emitted only with --show-all
    MISSING = "missing"
    OPEN_PATH = "open_path"
    PATH_COUNT = "path_count"
    NODE_COUNT = "node_count"
    NODE_INCOMPATIBILITY = "node_incompatibility"
    CONTOUR_ORDER = "contour_order"
    WRONG_START_POINT = "wrong_start_point"
    KINK = "kink"
    UNDERWEIGHT = "underweight"
    OVERWEIGHT = "overweight"

    # Relative severity: lower number == more severe.
    # Consumed by sort_problems() to order the report.
    severity = {
        MISSING: 1,
        OPEN_PATH: 2,
        PATH_COUNT: 3,
        NODE_COUNT: 4,
        NODE_INCOMPATIBILITY: 5,
        CONTOUR_ORDER: 6,
        WRONG_START_POINT: 7,
        KINK: 8,
        UNDERWEIGHT: 9,
        OVERWEIGHT: 10,
        NOTHING: 11,
    }
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def sort_problems(problems):
    """Sort problems by severity, then by glyph name, then by problem message."""

    def glyph_score(item):
        # Score a glyph by its most severe problem (lowest severity
        # number), with the recorded tolerance as a tie-breaker.
        _, glyph_problems = item
        return -min(
            InterpolatableProblem.severity[p["type"]] + p.get("tolerance", 0)
            for p in glyph_problems
        )

    # reverse=True on a negated minimum == ascending by severity score.
    return dict(sorted(problems.items(), key=glyph_score, reverse=True))
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def rot_list(l, k):
    """Rotate list by k items forward. Ie. item at position 0 will be
    at position k in returned list. Negative k is allowed."""
    # Note: unlike deque.rotate, the slices deliberately do NOT wrap for
    # |k| > len(l); they reproduce the original slice semantics exactly.
    tail = l[-k:]
    head = l[:-k]
    return tail + head
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class PerContourPen(BasePen):
    """Segment pen that splits a glyph into one sub-pen per contour.

    Every contour drawn through this pen is replayed into a fresh
    instance of the *Pen* factory passed to the constructor; the
    per-contour pens accumulate in ``self.value`` in drawing order.
    """

    def __init__(self, Pen, glyphset=None):
        BasePen.__init__(self, glyphset)
        self._glyphset = glyphset
        self._Pen = Pen
        self._pen = None  # pen receiving the contour currently being drawn
        self.value = []

    def _newItem(self):
        # Open a new per-contour pen and make it the current target.
        pen = self._Pen()
        self._pen = pen
        self.value.append(pen)

    def _moveTo(self, p0):
        self._newItem()
        self._pen.moveTo(p0)

    def _lineTo(self, p1):
        self._pen.lineTo(p1)

    def _qCurveToOne(self, p1, p2):
        self._pen.qCurveTo(p1, p2)

    def _curveToOne(self, p1, p2, p3):
        self._pen.curveTo(p1, p2, p3)

    def _closePath(self):
        self._pen.closePath()
        self._pen = None

    def _endPath(self):
        self._pen.endPath()
        self._pen = None
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
class PerContourOrComponentPen(PerContourPen):
    # Like PerContourPen, but also records components, each as its own item.
    def addComponent(self, glyphName, transformation):
        # A component gets a fresh per-item pen, just like a contour.
        self._newItem()
        self.value[-1].addComponent(glyphName, transformation)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
class SimpleRecordingPointPen(AbstractPointPen):
    """Point pen recording (point, is_on_curve) tuples into ``self.value``.

    Path boundaries are intentionally discarded; only the flat point
    stream is kept.
    """

    def __init__(self):
        self.value = []

    def beginPath(self, identifier=None, **kwargs):
        pass

    def endPath(self) -> None:
        pass

    def addPoint(self, pt, segmentType=None):
        # Off-curve points arrive with segmentType=None.
        self.value.append((pt, segmentType is not None))
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def vdiff_hypot2(v0, v1):
    """Squared Euclidean distance between two equal-length real vectors."""
    return sum((b - a) * (b - a) for a, b in zip(v0, v1))
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def vdiff_hypot2_complex(v0, v1):
    """Squared Euclidean distance between two vectors of complex points.

    Equivalent to summing (d * d.conjugate()).real per element, but
    computed componentwise, which benchmarks faster.
    """
    return sum(
        (d := x1 - x0).real * d.real + d.imag * d.imag
        for x0, x1 in zip(v0, v1)
    )
|
| 136 |
+
|
| 137 |
+
|
def matching_cost(G, matching):
    """Total cost of assigning row i to column matching[i] in cost matrix G."""
    total = 0
    for row, col in enumerate(matching):
        total += G[row][col]
    return total
| 140 |
+
|
| 141 |
+
|
def min_cost_perfect_bipartite_matching_scipy(G):
    """Minimum-cost perfect matching of square cost matrix G via scipy's
    Hungarian-algorithm solver.  Returns (matching, total_cost)."""
    n = len(G)
    rows, cols = linear_sum_assignment(G)
    assert (rows == list(range(n))).all()
    # Convert numpy integers to plain Python ints so the result is
    # JSON-serializable.
    matching = [int(c) for c in cols]
    return matching, matching_cost(G, matching)
| 150 |
+
|
| 151 |
+
|
def min_cost_perfect_bipartite_matching_munkres(G):
    """Minimum-cost perfect matching of square cost matrix G via the
    third-party 'munkres' package.  Returns (matching, total_cost)."""
    n = len(G)
    cols = [None] * n
    # Munkres yields (row, col) pairs; collect column per row.
    for row, col in Munkres().compute(G):
        cols[row] = col
    return cols, matching_cost(G, cols)
| 158 |
+
|
| 159 |
+
|
def min_cost_perfect_bipartite_matching_bruteforce(G):
    """Minimum-cost perfect matching by exhaustive search.

    Fallback used when neither scipy nor munkres is installed; factorial
    cost, so only viable for tiny matrices.
    """
    n = len(G)

    if n > 6:
        raise Exception("Install Python module 'munkres' or 'scipy >= 0.17.0'")

    # Otherwise just brute-force: try every permutation, keep the cheapest
    # (first one wins on ties, as with the strict < below).
    best = None
    best_cost = None
    for perm in itertools.permutations(range(n)):
        cost = matching_cost(G, perm)
        if best_cost is None or cost < best_cost:
            best, best_cost = list(perm), cost
    return best, best_cost
| 175 |
+
|
| 176 |
+
|
# Pick the best available assignment solver: scipy's Hungarian algorithm if
# installed, then the 'munkres' package, finally the brute-force fallback
# (which only handles n <= 6).
try:
    from scipy.optimize import linear_sum_assignment

    min_cost_perfect_bipartite_matching = min_cost_perfect_bipartite_matching_scipy
except ImportError:
    try:
        from munkres import Munkres

        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_munkres
        )
    except ImportError:
        min_cost_perfect_bipartite_matching = (
            min_cost_perfect_bipartite_matching_bruteforce
        )
| 192 |
+
|
| 193 |
+
|
def contour_vector_from_stats(stats):
    """Summarize a contour's Green-theorem statistics as a comparison vector.

    Don't change the order of items here.  It's okay to add to the end,
    but otherwise, other code depends on it.  Search for "covariance".
    """
    size = sqrt(abs(stats.area))
    signed_size = copysign(size, stats.area)
    return (
        signed_size,
        stats.meanX,
        stats.meanY,
        stats.stddevX * 2,
        stats.stddevY * 2,
        stats.correlation * size,
    )
| 207 |
+
|
| 208 |
+
|
def matching_for_vectors(m0, m1):
    """Match items of m0 to items of m1 by minimum total squared distance.

    Returns (matching, matching_cost, identity_cost): matching[i] is the
    index in m1 assigned to m0[i]; identity_cost is the cost of the
    identity assignment, for comparison.
    """
    n = len(m0)

    # NOTE(review): identity_matching is unused below.
    identity_matching = list(range(n))

    costs = [[vdiff_hypot2(v0, v1) for v1 in m1] for v0 in m0]
    (
        matching,
        matching_cost,
    ) = min_cost_perfect_bipartite_matching(costs)
    identity_cost = sum(costs[i][i] for i in range(n))
    return matching, matching_cost, identity_cost
| 221 |
+
|
| 222 |
+
|
def points_characteristic_bits(points):
    """Pack the per-point on-curve flags of *points* into an integer.

    Iterates in reverse so the first point ends up in the lowest bit.
    """
    bits = 0
    for _, is_on_curve in reversed(points):
        bits = (bits << 1) | is_on_curve
    return bits
| 228 |
+
|
| 229 |
+
|
# Number of vector entries contributed per point by points_complex_vector().
_NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR = 4
| 231 |
+
|
| 232 |
+
|
def points_complex_vector(points):
    """Encode a point list as a flat complex vector for shape comparison.

    Each point contributes four entries: the point itself, the (weighted)
    vector to the next point, the turn vector, and a cross-product term
    capturing the turn angle.  The point list is treated as cyclic.
    """
    vector = []
    if not points:
        return vector
    points = [complex(*pt) for pt, _ in points]
    n = len(points)
    assert _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR == 4
    # Wrap around so a lookahead of up to three points is always in range.
    points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
    while len(points) < _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR:
        points.extend(points[: _NUM_ITEMS_PER_POINTS_COMPLEX_VECTOR - 1])
    for i in range(n):
        # The weights are magic numbers.

        # The point itself
        p0 = points[i]
        vector.append(p0)

        # The vector to the next point
        p1 = points[i + 1]
        d0 = p1 - p0
        vector.append(d0 * 3)

        # The turn vector
        p2 = points[i + 2]
        d1 = p2 - p1
        vector.append(d1 - d0)

        # The angle to the next point, as a cross product;
        # Square root of, to match dimensionality of distance.
        cross = d0.real * d1.imag - d0.imag * d1.real
        cross = copysign(sqrt(abs(cross)), cross)
        vector.append(cross * 4)

    return vector
| 267 |
+
|
| 268 |
+
|
def add_isomorphisms(points, isomorphisms, reverse):
    """Append to *isomorphisms* every rotation (optionally of the reversed
    contour) whose on-curve/off-curve pattern matches the original contour.

    Each appended entry is (vector, start_point_index, reversed_flag).
    """
    reference_bits = points_characteristic_bits(points)
    n = len(points)

    # if points[0][0] == points[-1][0]:
    #   abort

    if reverse:
        points = points[::-1]
        bits = points_characteristic_bits(points)
    else:
        bits = reference_bits

    vector = points_complex_vector(points)

    assert len(vector) % n == 0
    mult = len(vector) // n  # vector entries per point
    mask = (1 << n) - 1  # n-bit mask for cyclic rotation of the flags

    for i in range(n):
        # Cyclically rotate the flag bits right by i; a match with the
        # reference means starting at point i preserves the node pattern.
        b = ((bits << (n - i)) & mask) | (bits >> i)
        if b == reference_bits:
            isomorphisms.append(
                (rot_list(vector, -i * mult), n - 1 - i if reverse else i, reverse)
            )
| 294 |
+
|
| 295 |
+
|
def find_parents_and_order(glyphsets, locations, *, discrete_axes=frozenset()):
    """Decide a processing order for masters and a parent for each.

    Returns (parents, order): parents[i] is the index of the master that
    master i is compared against (None for roots), and order is the
    sequence in which masters are processed.

    When *locations* is given and scipy is available, a minimum spanning
    tree over the master locations is built and traversed breadth-first
    from the base masters; otherwise each master is simply parented to
    the previous one.
    """
    # discrete_axes defaults to frozenset() (immutable) rather than set():
    # it is only membership-tested, and a mutable default is a footgun.
    parents = [None] + list(range(len(glyphsets) - 1))
    order = list(range(len(glyphsets)))
    if locations:
        # Order base master first: a base is at 0 on all continuous axes.
        bases = [
            i
            for i, l in enumerate(locations)
            if all(v == 0 for k, v in l.items() if k not in discrete_axes)
        ]
        # Use the module logger consistently (was logging.info/warning,
        # which goes to the root logger).
        if bases:
            log.info("Found %s base masters: %s", len(bases), bases)
        else:
            log.warning("No base master location found")

        # Form a minimum spanning tree of the locations
        try:
            from scipy.sparse.csgraph import minimum_spanning_tree

            graph = [[0] * len(locations) for _ in range(len(locations))]
            axes = set()
            for l in locations:
                axes.update(l.keys())
            axes = sorted(axes)
            vectors = [tuple(l.get(k, 0) for k in axes) for l in locations]
            for i, j in itertools.combinations(range(len(locations)), 2):
                i_discrete_location = {
                    k: v for k, v in zip(axes, vectors[i]) if k in discrete_axes
                }
                j_discrete_location = {
                    k: v for k, v in zip(axes, vectors[j]) if k in discrete_axes
                }
                # Masters in different discrete-axis buckets are never
                # connected directly.
                if i_discrete_location != j_discrete_location:
                    continue
                graph[i][j] = vdiff_hypot2(vectors[i], vectors[j])

            tree = minimum_spanning_tree(graph, overwrite=True)
            rows, cols = tree.nonzero()
            graph = defaultdict(set)
            for row, col in zip(rows, cols):
                graph[row].add(col)
                graph[col].add(row)

            # Traverse graph from the base(s) and assign parents.
            parents = [None] * len(locations)
            order = []
            visited = set()
            queue = deque(bases)
            while queue:
                i = queue.popleft()
                if i in visited:
                    # A node can be enqueued from two directions when
                    # multiple bases share a tree component; process once.
                    continue
                visited.add(i)
                order.append(i)
                for j in sorted(graph[i]):
                    if j not in visited:
                        parents[j] = i
                        queue.append(j)
            assert len(order) == len(
                parents
            ), "Not all masters are reachable; report an issue"

        except ImportError:
            pass

    log.info("Parents: %s", parents)
    log.info("Order: %s", order)
    return parents, order
| 362 |
+
|
| 363 |
+
|
def transform_from_stats(stats, inverse=False):
    """Build a Transform mapping the unit circle onto the contour's error
    ellipse (or the reverse mapping, when *inverse* is True).

    Uses the closed-form eigendecomposition of the 2x2 covariance matrix:
    https://cookierobotics.com/007/
    """
    a = stats.varianceX
    b = stats.covariance
    c = stats.varianceY

    delta = (((a - c) * 0.5) ** 2 + b * b) ** 0.5
    lambda1 = (a + c) * 0.5 + delta  # Major eigenvalue
    lambda2 = (a + c) * 0.5 - delta  # Minor eigenvalue
    # Rotation of the major axis; b == 0 is the degenerate (axis-aligned) case.
    theta = atan2(lambda1 - a, b) if b != 0 else (pi * 0.5 if a < c else 0)
    trans = Transform()

    if lambda2 < 0:
        # XXX This is a hack.
        # The problem is that the covariance matrix is singular.
        # This happens when the contour is a line, or a circle.
        # In that case, the covariance matrix is not a good
        # representation of the contour.
        # We should probably detect this earlier and avoid
        # computing the covariance matrix in the first place.
        # But for now, we just avoid the division by zero.
        lambda2 = 0

    if inverse:
        trans = trans.translate(-stats.meanX, -stats.meanY)
        trans = trans.rotate(-theta)
        # NOTE(review): if lambda2 was clamped to 0 above, this divides by
        # sqrt(0) — confirm callers never hit inverse=True on a singular
        # covariance matrix.
        trans = trans.scale(1 / sqrt(lambda1), 1 / sqrt(lambda2))
    else:
        trans = trans.scale(sqrt(lambda1), sqrt(lambda2))
        trans = trans.rotate(theta)
        trans = trans.translate(stats.meanX, stats.meanY)

    return trans
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/interpolatablePlot.py
ADDED
|
@@ -0,0 +1,1269 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .interpolatableHelpers import *
|
| 2 |
+
from fontTools.ttLib import TTFont
|
| 3 |
+
from fontTools.ttLib.ttGlyphSet import LerpGlyphSet
|
| 4 |
+
from fontTools.pens.recordingPen import (
|
| 5 |
+
RecordingPen,
|
| 6 |
+
DecomposingRecordingPen,
|
| 7 |
+
RecordingPointPen,
|
| 8 |
+
)
|
| 9 |
+
from fontTools.pens.boundsPen import ControlBoundsPen
|
| 10 |
+
from fontTools.pens.cairoPen import CairoPen
|
| 11 |
+
from fontTools.pens.pointPen import (
|
| 12 |
+
SegmentToPointPen,
|
| 13 |
+
PointToSegmentPen,
|
| 14 |
+
ReverseContourPointPen,
|
| 15 |
+
)
|
| 16 |
+
from fontTools.varLib.interpolatableHelpers import (
|
| 17 |
+
PerContourOrComponentPen,
|
| 18 |
+
SimpleRecordingPointPen,
|
| 19 |
+
)
|
| 20 |
+
from itertools import cycle
|
| 21 |
+
from functools import wraps
|
| 22 |
+
from io import BytesIO
|
| 23 |
+
import cairo
|
| 24 |
+
import math
|
| 25 |
+
import os
|
| 26 |
+
import logging
|
| 27 |
+
|
# Module logger, shared with fontTools.varLib.interpolatable.
log = logging.getLogger("fontTools.varLib.interpolatable")
| 29 |
+
|
| 30 |
+
|
class OverridingDict(dict):
    """A dict that falls back to a parent dict for missing keys.

    Local assignments override entries without modifying *parent_dict*.
    """

    def __init__(self, parent_dict):
        self.parent_dict = parent_dict

    def __missing__(self, key):
        # Delegate lookups of locally-absent keys to the parent.
        return self.parent_dict[key]
| 37 |
+
|
| 38 |
+
|
class InterpolatablePlot:
    # Page geometry, in PostScript points (1/72 inch): US Letter.
    width = 8.5 * 72
    height = 11 * 72
    pad = 0.1 * 72
    title_font_size = 24
    font_size = 16
    page_number = 1  # 1-based; advanced by show_page()
    # Colors are RGB(A) tuples in the 0..1 range, as cairo expects.
    head_color = (0.3, 0.3, 0.3)
    label_color = (0.2, 0.2, 0.2)
    border_color = (0.9, 0.9, 0.9)
    border_width = 0.5
    fill_color = (0.8, 0.8, 0.8)
    stroke_color = (0.1, 0.1, 0.1)
    stroke_width = 1
    oncurve_node_color = (0, 0.8, 0, 0.7)
    oncurve_node_diameter = 6
    offcurve_node_color = (0, 0.5, 0, 0.7)
    offcurve_node_diameter = 4
    handle_color = (0, 0.5, 0, 0.7)
    handle_width = 0.5
    corrected_start_point_color = (0, 0.9, 0, 0.7)
    corrected_start_point_size = 7
    wrong_start_point_color = (1, 0, 0, 0.7)
    start_point_color = (0, 0, 1, 0.7)
    start_arrow_length = 9
    kink_point_size = 7
    kink_point_color = (1, 0, 1, 0.7)
    kink_circle_size = 15
    kink_circle_stroke_width = 1
    kink_circle_color = (1, 0, 1, 0.7)
    # Per-contour colors, cycled when a glyph has more contours than entries.
    contour_colors = ((1, 0, 0), (0, 0, 1), (0, 1, 0), (1, 1, 0), (1, 0, 1), (0, 1, 1))
    contour_alpha = 0.5
    weight_issue_contour_color = (0, 0, 0, 0.4)
    no_issues_label = "Your font's good! Have a cupcake..."
    no_issues_label_color = (0, 0.5, 0)
    cupcake_color = (0.3, 0, 0.3)
| 75 |
+
cupcake = r"""
|
| 76 |
+
,@.
|
| 77 |
+
,@.@@,.
|
| 78 |
+
,@@,.@@@. @.@@@,.
|
| 79 |
+
,@@. @@@. @@. @@,.
|
| 80 |
+
,@@@.@,.@. @. @@@@,.@.@@,.
|
| 81 |
+
,@@.@. @@.@@. @,. .@' @' @@,
|
| 82 |
+
,@@. @. .@@.@@@. @@' @,
|
| 83 |
+
,@. @@. @,
|
| 84 |
+
@. @,@@,. , .@@,
|
| 85 |
+
@,. .@,@@,. .@@,. , .@@, @, @,
|
| 86 |
+
@. .@. @ @@,. , @
|
| 87 |
+
@,.@@. @,. @@,. @. @,. @'
|
| 88 |
+
@@||@,. @'@,. @@,. @@ @,. @'@@, @'
|
| 89 |
+
\\@@@@' @,. @'@@@@' @@,. @@@' //@@@'
|
| 90 |
+
|||||||| @@,. @@' ||||||| |@@@|@|| ||
|
| 91 |
+
\\\\\\\ ||@@@|| ||||||| ||||||| //
|
| 92 |
+
||||||| |||||| |||||| |||||| ||
|
| 93 |
+
\\\\\\ |||||| |||||| |||||| //
|
| 94 |
+
|||||| ||||| ||||| ||||| ||
|
| 95 |
+
\\\\\ ||||| ||||| ||||| //
|
| 96 |
+
||||| |||| ||||| |||| ||
|
| 97 |
+
\\\\ |||| |||| |||| //
|
| 98 |
+
||||||||||||||||||||||||
|
| 99 |
+
"""
|
| 100 |
+
emoticon_color = (0, 0.3, 0.3)
|
| 101 |
+
shrug = r"""\_(")_/"""
|
| 102 |
+
underweight = r"""
|
| 103 |
+
o
|
| 104 |
+
/|\
|
| 105 |
+
/ \
|
| 106 |
+
"""
|
| 107 |
+
overweight = r"""
|
| 108 |
+
o
|
| 109 |
+
/O\
|
| 110 |
+
/ \
|
| 111 |
+
"""
|
| 112 |
+
yay = r""" \o/ """
|
| 113 |
+
|
    def __init__(self, out, glyphsets, names=None, **kwargs):
        """Set up a plot writer.

        out: output target the (subclass) renderer writes into.
        glyphsets: sequence of glyphsets (masters) being compared.
        names: optional labels for the glyphsets; defaults to their reprs.
        Any class-level setting (colors, sizes, ...) may be overridden
        via keyword arguments.
        """
        self.out = out
        self.glyphsets = glyphsets
        self.names = names or [repr(g) for g in glyphsets]
        self.toc = {}  # page number -> (glyphname, problems)

        # Only allow overriding settings that already exist on the class.
        for k, v in kwargs.items():
            if not hasattr(self, k):
                raise TypeError("Unknown keyword argument: %s" % k)
            setattr(self, k, v)

        # Two panels per row; panel height leaves room for captions/title.
        self.panel_width = self.width / 2 - self.pad * 3
        self.panel_height = (
            self.height / 2 - self.pad * 6 - self.font_size * 2 - self.title_font_size
        )
| 129 |
+
|
    def __enter__(self):
        # Context-manager support; subclasses may override to open resources.
        return self
| 132 |
+
|
    def __exit__(self, type, value, traceback):
        # Nothing to clean up here; subclasses may override.
        pass
| 135 |
+
|
    def show_page(self):
        # Finish the current page; subclasses extend this to emit output.
        self.page_number += 1
| 138 |
+
|
    def add_title_page(
        self, files, *, show_tolerance=True, tolerance=None, kinkiness=None
    ):
        """Render the title page: the input files, their metadata, and the
        legend; then finish the page."""
        pad = self.pad
        width = self.width - 3 * self.pad
        height = self.height - 2 * self.pad
        x = y = pad

        self.draw_label(
            "Problem report for:",
            x=x,
            y=y,
            bold=True,
            width=width,
            font_size=self.title_font_size,
        )
        y += self.title_font_size

        import hashlib

        for file in files:
            base_file = os.path.basename(file)
            y += self.font_size + self.pad
            self.draw_label(base_file, x=x, y=y, bold=True, width=width)
            y += self.font_size + self.pad

            try:
                # NOTE(review): the file handle from open() is never closed.
                h = hashlib.sha1(open(file, "rb").read()).hexdigest()
                self.draw_label("sha1: %s" % h, x=x + pad, y=y, width=width)
                y += self.font_size
            except IsADirectoryError:
                # .glyphspackage sources are directories; no hash for those.
                pass

            if file.endswith(".ttf"):
                ttFont = TTFont(file)
                name = ttFont["name"] if "name" in ttFont else None
                if name:
                    # Prefer typographic family names (21, 16) over nameID 1.
                    for what, nameIDs in (
                        ("Family name", (21, 16, 1)),
                        ("Version", (5,)),
                    ):
                        n = name.getFirstDebugName(nameIDs)
                        if n is None:
                            continue
                        self.draw_label(
                            "%s: %s" % (what, n), x=x + pad, y=y, width=width
                        )
                        y += self.font_size + self.pad
            elif file.endswith((".glyphs", ".glyphspackage")):
                from glyphsLib import GSFont

                f = GSFont(file)
                for what, field in (
                    ("Family name", "familyName"),
                    ("VersionMajor", "versionMajor"),
                    ("VersionMinor", "_versionMinor"),
                ):
                    self.draw_label(
                        "%s: %s" % (what, getattr(f, field)),
                        x=x + pad,
                        y=y,
                        width=width,
                    )
                    y += self.font_size + self.pad

        self.draw_legend(
            show_tolerance=show_tolerance, tolerance=tolerance, kinkiness=kinkiness
        )
        self.show_page()
| 208 |
+
|
    def draw_legend(self, *, show_tolerance=True, tolerance=None, kinkiness=None):
        """Render the legend explaining the plot's markers and colors.

        Drawn bottom-up: y starts near the page bottom and decreases as
        entries are added, so entries appear in reverse of draw order.
        """
        cr = cairo.Context(self.surface)

        x = self.pad
        y = self.height - self.pad - self.font_size * 2
        width = self.width - 2 * self.pad

        xx = x + self.pad * 2  # marker column
        xxx = x + self.pad * 4  # label column

        if show_tolerance:
            self.draw_label(
                "Tolerance: badness; closer to zero the worse", x=xxx, y=y, width=width
            )
            y -= self.pad + self.font_size

        # Swatch: gray fill overlaid with translucent black = underweight.
        self.draw_label("Underweight contours", x=xxx, y=y, width=width)
        cr.rectangle(xx - self.pad * 0.7, y, 1.5 * self.pad, self.font_size)
        # NOTE(review): unlike the next swatch, fill_color is not guarded
        # with "if self.fill_color:" here — confirm whether intentional.
        cr.set_source_rgb(*self.fill_color)
        cr.fill_preserve()
        if self.stroke_color:
            cr.set_source_rgb(*self.stroke_color)
            cr.set_line_width(self.stroke_width)
            cr.stroke_preserve()
        cr.set_source_rgba(*self.weight_issue_contour_color)
        cr.fill()
        y -= self.pad + self.font_size

        # Swatch: first contour color over the fill = wrong contour order.
        self.draw_label(
            "Colored contours: contours with the wrong order", x=xxx, y=y, width=width
        )
        cr.rectangle(xx - self.pad * 0.7, y, 1.5 * self.pad, self.font_size)
        if self.fill_color:
            cr.set_source_rgb(*self.fill_color)
            cr.fill_preserve()
        if self.stroke_color:
            cr.set_source_rgb(*self.stroke_color)
            cr.set_line_width(self.stroke_width)
            cr.stroke_preserve()
        cr.set_source_rgba(*self.contour_colors[0], self.contour_alpha)
        cr.fill()
        y -= self.pad + self.font_size

        self.draw_label("Kink artifact", x=xxx, y=y, width=width)
        self.draw_circle(
            cr,
            x=xx,
            y=y + self.font_size * 0.5,
            diameter=self.kink_circle_size,
            stroke_width=self.kink_circle_stroke_width,
            color=self.kink_circle_color,
        )
        y -= self.pad + self.font_size

        self.draw_label("Point causing kink in the contour", x=xxx, y=y, width=width)
        self.draw_dot(
            cr,
            x=xx,
            y=y + self.font_size * 0.5,
            diameter=self.kink_point_size,
            color=self.kink_point_color,
        )
        y -= self.pad + self.font_size

        self.draw_label("Suggested new contour start point", x=xxx, y=y, width=width)
        self.draw_dot(
            cr,
            x=xx,
            y=y + self.font_size * 0.5,
            diameter=self.corrected_start_point_size,
            color=self.corrected_start_point_color,
        )
        y -= self.pad + self.font_size

        self.draw_label(
            "Contour start point in contours with wrong direction",
            x=xxx,
            y=y,
            width=width,
        )
        self.draw_arrow(
            cr,
            x=xx - self.start_arrow_length * 0.3,
            y=y + self.font_size * 0.5,
            color=self.wrong_start_point_color,
        )
        y -= self.pad + self.font_size

        self.draw_label(
            "Contour start point when the first two points overlap",
            x=xxx,
            y=y,
            width=width,
        )
        self.draw_dot(
            cr,
            x=xx,
            y=y + self.font_size * 0.5,
            diameter=self.corrected_start_point_size,
            color=self.start_point_color,
        )
        y -= self.pad + self.font_size

        self.draw_label("Contour start point and direction", x=xxx, y=y, width=width)
        self.draw_arrow(
            cr,
            x=xx - self.start_arrow_length * 0.3,
            y=y + self.font_size * 0.5,
            color=self.start_point_color,
        )
        y -= self.pad + self.font_size

        self.draw_label("Legend:", x=x, y=y, width=width, bold=True)
        y -= self.pad + self.font_size

        if kinkiness is not None:
            self.draw_label(
                "Kink-reporting aggressiveness: %g" % kinkiness,
                x=xxx,
                y=y,
                width=width,
            )
            y -= self.pad + self.font_size

        if tolerance is not None:
            self.draw_label(
                "Error tolerance: %g" % tolerance,
                x=xxx,
                y=y,
                width=width,
            )
            y -= self.pad + self.font_size

        self.draw_label("Parameters:", x=x, y=y, width=width, bold=True)
        y -= self.pad + self.font_size
| 344 |
+
|
| 345 |
+
def add_summary(self, problems):
|
| 346 |
+
pad = self.pad
|
| 347 |
+
width = self.width - 3 * self.pad
|
| 348 |
+
height = self.height - 2 * self.pad
|
| 349 |
+
x = y = pad
|
| 350 |
+
|
| 351 |
+
self.draw_label(
|
| 352 |
+
"Summary of problems",
|
| 353 |
+
x=x,
|
| 354 |
+
y=y,
|
| 355 |
+
bold=True,
|
| 356 |
+
width=width,
|
| 357 |
+
font_size=self.title_font_size,
|
| 358 |
+
)
|
| 359 |
+
y += self.title_font_size
|
| 360 |
+
|
| 361 |
+
glyphs_per_problem = defaultdict(set)
|
| 362 |
+
for glyphname, problems in sorted(problems.items()):
|
| 363 |
+
for problem in problems:
|
| 364 |
+
glyphs_per_problem[problem["type"]].add(glyphname)
|
| 365 |
+
|
| 366 |
+
if "nothing" in glyphs_per_problem:
|
| 367 |
+
del glyphs_per_problem["nothing"]
|
| 368 |
+
|
| 369 |
+
for problem_type in sorted(
|
| 370 |
+
glyphs_per_problem, key=lambda x: InterpolatableProblem.severity[x]
|
| 371 |
+
):
|
| 372 |
+
y += self.font_size
|
| 373 |
+
self.draw_label(
|
| 374 |
+
"%s: %d" % (problem_type, len(glyphs_per_problem[problem_type])),
|
| 375 |
+
x=x,
|
| 376 |
+
y=y,
|
| 377 |
+
width=width,
|
| 378 |
+
bold=True,
|
| 379 |
+
)
|
| 380 |
+
y += self.font_size
|
| 381 |
+
|
| 382 |
+
for glyphname in sorted(glyphs_per_problem[problem_type]):
|
| 383 |
+
if y + self.font_size > height:
|
| 384 |
+
self.show_page()
|
| 385 |
+
y = self.font_size + pad
|
| 386 |
+
self.draw_label(glyphname, x=x + 2 * pad, y=y, width=width - 2 * pad)
|
| 387 |
+
y += self.font_size
|
| 388 |
+
|
| 389 |
+
self.show_page()
|
| 390 |
+
|
    def _add_listing(self, title, items):
        """Render a listing page (used for TOC and index).

        items: iterable of (page_no, (glyphname, problems)) tuples, in the
        order they should appear.
        """
        pad = self.pad
        width = self.width - 2 * self.pad
        height = self.height - 2 * self.pad
        x = y = pad

        self.draw_label(
            title, x=x, y=y, bold=True, width=width, font_size=self.title_font_size
        )
        y += self.title_font_size + self.pad

        last_glyphname = None
        for page_no, (glyphname, problems) in items:
            # A glyph may appear on several pages; list only its first.
            if glyphname == last_glyphname:
                continue
            last_glyphname = glyphname
            if y + self.font_size > height:
                self.show_page()
                y = self.font_size + pad
            self.draw_label(glyphname, x=x + 5 * pad, y=y, width=width - 2 * pad)
            # Right-aligned page number in the left gutter.
            self.draw_label(str(page_no), x=x, y=y, width=4 * pad, align=1)
            y += self.font_size

        self.show_page()
| 415 |
+
|
    def add_table_of_contents(self):
        # TOC is ordered by page number (the toc keys).
        self._add_listing("Table of contents", sorted(self.toc.items()))
| 418 |
+
|
    def add_index(self):
        # Index is ordered alphabetically by glyph name (x[1][0]).
        self._add_listing("Index", sorted(self.toc.items(), key=lambda x: x[1][0]))
| 421 |
+
|
    def add_problems(self, problems, *, show_tolerance=True, show_page_number=True):
        """Render all problems, batching consecutive problems of a glyph
        that involve the same master (or master pair) onto one page."""
        for glyph, glyph_problems in problems.items():
            last_masters = None
            current_glyph_problems = []
            for p in glyph_problems:
                # Key identifying which master(s) the problem involves.
                masters = (
                    p["master_idx"]
                    if "master_idx" in p
                    else (p["master_1_idx"], p["master_2_idx"])
                )
                if masters == last_masters:
                    current_glyph_problems.append(p)
                    continue
                # Flush
                if current_glyph_problems:
                    self.add_problem(
                        glyph,
                        current_glyph_problems,
                        show_tolerance=show_tolerance,
                        show_page_number=show_page_number,
                    )
                    self.show_page()
                    current_glyph_problems = []
                last_masters = masters
                current_glyph_problems.append(p)
            # Flush the final batch for this glyph.
            if current_glyph_problems:
                self.add_problem(
                    glyph,
                    current_glyph_problems,
                    show_tolerance=show_tolerance,
                    show_page_number=show_page_number,
                )
                self.show_page()
| 455 |
+
|
| 456 |
+
def add_problem(
|
| 457 |
+
self, glyphname, problems, *, show_tolerance=True, show_page_number=True
|
| 458 |
+
):
|
| 459 |
+
if type(problems) not in (list, tuple):
|
| 460 |
+
problems = [problems]
|
| 461 |
+
|
| 462 |
+
self.toc[self.page_number] = (glyphname, problems)
|
| 463 |
+
|
| 464 |
+
problem_type = problems[0]["type"]
|
| 465 |
+
problem_types = set(problem["type"] for problem in problems)
|
| 466 |
+
if not all(pt == problem_type for pt in problem_types):
|
| 467 |
+
problem_type = ", ".join(sorted({problem["type"] for problem in problems}))
|
| 468 |
+
|
| 469 |
+
log.info("Drawing %s: %s", glyphname, problem_type)
|
| 470 |
+
|
| 471 |
+
master_keys = (
|
| 472 |
+
("master_idx",)
|
| 473 |
+
if "master_idx" in problems[0]
|
| 474 |
+
else ("master_1_idx", "master_2_idx")
|
| 475 |
+
)
|
| 476 |
+
master_indices = [problems[0][k] for k in master_keys]
|
| 477 |
+
|
| 478 |
+
if problem_type == InterpolatableProblem.MISSING:
|
| 479 |
+
sample_glyph = next(
|
| 480 |
+
i for i, m in enumerate(self.glyphsets) if m[glyphname] is not None
|
| 481 |
+
)
|
| 482 |
+
master_indices.insert(0, sample_glyph)
|
| 483 |
+
|
| 484 |
+
x = self.pad
|
| 485 |
+
y = self.pad
|
| 486 |
+
|
| 487 |
+
self.draw_label(
|
| 488 |
+
"Glyph name: " + glyphname,
|
| 489 |
+
x=x,
|
| 490 |
+
y=y,
|
| 491 |
+
color=self.head_color,
|
| 492 |
+
align=0,
|
| 493 |
+
bold=True,
|
| 494 |
+
font_size=self.title_font_size,
|
| 495 |
+
)
|
| 496 |
+
tolerance = min(p.get("tolerance", 1) for p in problems)
|
| 497 |
+
if tolerance < 1 and show_tolerance:
|
| 498 |
+
self.draw_label(
|
| 499 |
+
"tolerance: %.2f" % tolerance,
|
| 500 |
+
x=x,
|
| 501 |
+
y=y,
|
| 502 |
+
width=self.width - 2 * self.pad,
|
| 503 |
+
align=1,
|
| 504 |
+
bold=True,
|
| 505 |
+
)
|
| 506 |
+
y += self.title_font_size + self.pad
|
| 507 |
+
self.draw_label(
|
| 508 |
+
"Problems: " + problem_type,
|
| 509 |
+
x=x,
|
| 510 |
+
y=y,
|
| 511 |
+
width=self.width - 2 * self.pad,
|
| 512 |
+
color=self.head_color,
|
| 513 |
+
bold=True,
|
| 514 |
+
)
|
| 515 |
+
y += self.font_size + self.pad * 2
|
| 516 |
+
|
| 517 |
+
scales = []
|
| 518 |
+
for which, master_idx in enumerate(master_indices):
|
| 519 |
+
glyphset = self.glyphsets[master_idx]
|
| 520 |
+
name = self.names[master_idx]
|
| 521 |
+
|
| 522 |
+
self.draw_label(
|
| 523 |
+
name,
|
| 524 |
+
x=x,
|
| 525 |
+
y=y,
|
| 526 |
+
color=self.label_color,
|
| 527 |
+
width=self.panel_width,
|
| 528 |
+
align=0.5,
|
| 529 |
+
)
|
| 530 |
+
y += self.font_size + self.pad
|
| 531 |
+
|
| 532 |
+
if glyphset[glyphname] is not None:
|
| 533 |
+
scales.append(
|
| 534 |
+
self.draw_glyph(glyphset, glyphname, problems, which, x=x, y=y)
|
| 535 |
+
)
|
| 536 |
+
else:
|
| 537 |
+
self.draw_emoticon(self.shrug, x=x, y=y)
|
| 538 |
+
y += self.panel_height + self.font_size + self.pad
|
| 539 |
+
|
| 540 |
+
if any(
|
| 541 |
+
pt
|
| 542 |
+
in (
|
| 543 |
+
InterpolatableProblem.NOTHING,
|
| 544 |
+
InterpolatableProblem.WRONG_START_POINT,
|
| 545 |
+
InterpolatableProblem.CONTOUR_ORDER,
|
| 546 |
+
InterpolatableProblem.KINK,
|
| 547 |
+
InterpolatableProblem.UNDERWEIGHT,
|
| 548 |
+
InterpolatableProblem.OVERWEIGHT,
|
| 549 |
+
)
|
| 550 |
+
for pt in problem_types
|
| 551 |
+
):
|
| 552 |
+
x = self.pad + self.panel_width + self.pad
|
| 553 |
+
y = self.pad
|
| 554 |
+
y += self.title_font_size + self.pad * 2
|
| 555 |
+
y += self.font_size + self.pad
|
| 556 |
+
|
| 557 |
+
glyphset1 = self.glyphsets[master_indices[0]]
|
| 558 |
+
glyphset2 = self.glyphsets[master_indices[1]]
|
| 559 |
+
|
| 560 |
+
# Draw the mid-way of the two masters
|
| 561 |
+
|
| 562 |
+
self.draw_label(
|
| 563 |
+
"midway interpolation",
|
| 564 |
+
x=x,
|
| 565 |
+
y=y,
|
| 566 |
+
color=self.head_color,
|
| 567 |
+
width=self.panel_width,
|
| 568 |
+
align=0.5,
|
| 569 |
+
)
|
| 570 |
+
y += self.font_size + self.pad
|
| 571 |
+
|
| 572 |
+
midway_glyphset = LerpGlyphSet(glyphset1, glyphset2)
|
| 573 |
+
self.draw_glyph(
|
| 574 |
+
midway_glyphset,
|
| 575 |
+
glyphname,
|
| 576 |
+
[{"type": "midway"}]
|
| 577 |
+
+ [
|
| 578 |
+
p
|
| 579 |
+
for p in problems
|
| 580 |
+
if p["type"]
|
| 581 |
+
in (
|
| 582 |
+
InterpolatableProblem.KINK,
|
| 583 |
+
InterpolatableProblem.UNDERWEIGHT,
|
| 584 |
+
InterpolatableProblem.OVERWEIGHT,
|
| 585 |
+
)
|
| 586 |
+
],
|
| 587 |
+
None,
|
| 588 |
+
x=x,
|
| 589 |
+
y=y,
|
| 590 |
+
scale=min(scales),
|
| 591 |
+
)
|
| 592 |
+
|
| 593 |
+
y += self.panel_height + self.font_size + self.pad
|
| 594 |
+
|
| 595 |
+
if any(
|
| 596 |
+
pt
|
| 597 |
+
in (
|
| 598 |
+
InterpolatableProblem.WRONG_START_POINT,
|
| 599 |
+
InterpolatableProblem.CONTOUR_ORDER,
|
| 600 |
+
InterpolatableProblem.KINK,
|
| 601 |
+
)
|
| 602 |
+
for pt in problem_types
|
| 603 |
+
):
|
| 604 |
+
# Draw the proposed fix
|
| 605 |
+
|
| 606 |
+
self.draw_label(
|
| 607 |
+
"proposed fix",
|
| 608 |
+
x=x,
|
| 609 |
+
y=y,
|
| 610 |
+
color=self.head_color,
|
| 611 |
+
width=self.panel_width,
|
| 612 |
+
align=0.5,
|
| 613 |
+
)
|
| 614 |
+
y += self.font_size + self.pad
|
| 615 |
+
|
| 616 |
+
overriding1 = OverridingDict(glyphset1)
|
| 617 |
+
overriding2 = OverridingDict(glyphset2)
|
| 618 |
+
perContourPen1 = PerContourOrComponentPen(
|
| 619 |
+
RecordingPen, glyphset=overriding1
|
| 620 |
+
)
|
| 621 |
+
perContourPen2 = PerContourOrComponentPen(
|
| 622 |
+
RecordingPen, glyphset=overriding2
|
| 623 |
+
)
|
| 624 |
+
glyphset1[glyphname].draw(perContourPen1)
|
| 625 |
+
glyphset2[glyphname].draw(perContourPen2)
|
| 626 |
+
|
| 627 |
+
for problem in problems:
|
| 628 |
+
if problem["type"] == InterpolatableProblem.CONTOUR_ORDER:
|
| 629 |
+
fixed_contours = [
|
| 630 |
+
perContourPen2.value[i] for i in problems[0]["value_2"]
|
| 631 |
+
]
|
| 632 |
+
perContourPen2.value = fixed_contours
|
| 633 |
+
|
| 634 |
+
for problem in problems:
|
| 635 |
+
if problem["type"] == InterpolatableProblem.WRONG_START_POINT:
|
| 636 |
+
# Save the wrong contours
|
| 637 |
+
wrongContour1 = perContourPen1.value[problem["contour"]]
|
| 638 |
+
wrongContour2 = perContourPen2.value[problem["contour"]]
|
| 639 |
+
|
| 640 |
+
# Convert the wrong contours to point pens
|
| 641 |
+
points1 = RecordingPointPen()
|
| 642 |
+
converter = SegmentToPointPen(points1, False)
|
| 643 |
+
wrongContour1.replay(converter)
|
| 644 |
+
points2 = RecordingPointPen()
|
| 645 |
+
converter = SegmentToPointPen(points2, False)
|
| 646 |
+
wrongContour2.replay(converter)
|
| 647 |
+
|
| 648 |
+
proposed_start = problem["value_2"]
|
| 649 |
+
|
| 650 |
+
# See if we need reversing; fragile but worth a try
|
| 651 |
+
if problem["reversed"]:
|
| 652 |
+
new_points2 = RecordingPointPen()
|
| 653 |
+
reversedPen = ReverseContourPointPen(new_points2)
|
| 654 |
+
points2.replay(reversedPen)
|
| 655 |
+
points2 = new_points2
|
| 656 |
+
proposed_start = len(points2.value) - 2 - proposed_start
|
| 657 |
+
|
| 658 |
+
# Rotate points2 so that the first point is the same as in points1
|
| 659 |
+
beginPath = points2.value[:1]
|
| 660 |
+
endPath = points2.value[-1:]
|
| 661 |
+
pts = points2.value[1:-1]
|
| 662 |
+
pts = pts[proposed_start:] + pts[:proposed_start]
|
| 663 |
+
points2.value = beginPath + pts + endPath
|
| 664 |
+
|
| 665 |
+
# Convert the point pens back to segment pens
|
| 666 |
+
segment1 = RecordingPen()
|
| 667 |
+
converter = PointToSegmentPen(segment1, True)
|
| 668 |
+
points1.replay(converter)
|
| 669 |
+
segment2 = RecordingPen()
|
| 670 |
+
converter = PointToSegmentPen(segment2, True)
|
| 671 |
+
points2.replay(converter)
|
| 672 |
+
|
| 673 |
+
# Replace the wrong contours
|
| 674 |
+
wrongContour1.value = segment1.value
|
| 675 |
+
wrongContour2.value = segment2.value
|
| 676 |
+
perContourPen1.value[problem["contour"]] = wrongContour1
|
| 677 |
+
perContourPen2.value[problem["contour"]] = wrongContour2
|
| 678 |
+
|
| 679 |
+
for problem in problems:
|
| 680 |
+
# If we have a kink, try to fix it.
|
| 681 |
+
if problem["type"] == InterpolatableProblem.KINK:
|
| 682 |
+
# Save the wrong contours
|
| 683 |
+
wrongContour1 = perContourPen1.value[problem["contour"]]
|
| 684 |
+
wrongContour2 = perContourPen2.value[problem["contour"]]
|
| 685 |
+
|
| 686 |
+
# Convert the wrong contours to point pens
|
| 687 |
+
points1 = RecordingPointPen()
|
| 688 |
+
converter = SegmentToPointPen(points1, False)
|
| 689 |
+
wrongContour1.replay(converter)
|
| 690 |
+
points2 = RecordingPointPen()
|
| 691 |
+
converter = SegmentToPointPen(points2, False)
|
| 692 |
+
wrongContour2.replay(converter)
|
| 693 |
+
|
| 694 |
+
i = problem["value"]
|
| 695 |
+
|
| 696 |
+
# Position points to be around the same ratio
|
| 697 |
+
# beginPath / endPath dance
|
| 698 |
+
j = i + 1
|
| 699 |
+
pt0 = points1.value[j][1][0]
|
| 700 |
+
pt1 = points2.value[j][1][0]
|
| 701 |
+
j_prev = (i - 1) % (len(points1.value) - 2) + 1
|
| 702 |
+
pt0_prev = points1.value[j_prev][1][0]
|
| 703 |
+
pt1_prev = points2.value[j_prev][1][0]
|
| 704 |
+
j_next = (i + 1) % (len(points1.value) - 2) + 1
|
| 705 |
+
pt0_next = points1.value[j_next][1][0]
|
| 706 |
+
pt1_next = points2.value[j_next][1][0]
|
| 707 |
+
|
| 708 |
+
pt0 = complex(*pt0)
|
| 709 |
+
pt1 = complex(*pt1)
|
| 710 |
+
pt0_prev = complex(*pt0_prev)
|
| 711 |
+
pt1_prev = complex(*pt1_prev)
|
| 712 |
+
pt0_next = complex(*pt0_next)
|
| 713 |
+
pt1_next = complex(*pt1_next)
|
| 714 |
+
|
| 715 |
+
# Find the ratio of the distance between the points
|
| 716 |
+
r0 = abs(pt0 - pt0_prev) / abs(pt0_next - pt0_prev)
|
| 717 |
+
r1 = abs(pt1 - pt1_prev) / abs(pt1_next - pt1_prev)
|
| 718 |
+
r_mid = (r0 + r1) / 2
|
| 719 |
+
|
| 720 |
+
pt0 = pt0_prev + r_mid * (pt0_next - pt0_prev)
|
| 721 |
+
pt1 = pt1_prev + r_mid * (pt1_next - pt1_prev)
|
| 722 |
+
|
| 723 |
+
points1.value[j] = (
|
| 724 |
+
points1.value[j][0],
|
| 725 |
+
(((pt0.real, pt0.imag),) + points1.value[j][1][1:]),
|
| 726 |
+
points1.value[j][2],
|
| 727 |
+
)
|
| 728 |
+
points2.value[j] = (
|
| 729 |
+
points2.value[j][0],
|
| 730 |
+
(((pt1.real, pt1.imag),) + points2.value[j][1][1:]),
|
| 731 |
+
points2.value[j][2],
|
| 732 |
+
)
|
| 733 |
+
|
| 734 |
+
# Convert the point pens back to segment pens
|
| 735 |
+
segment1 = RecordingPen()
|
| 736 |
+
converter = PointToSegmentPen(segment1, True)
|
| 737 |
+
points1.replay(converter)
|
| 738 |
+
segment2 = RecordingPen()
|
| 739 |
+
converter = PointToSegmentPen(segment2, True)
|
| 740 |
+
points2.replay(converter)
|
| 741 |
+
|
| 742 |
+
# Replace the wrong contours
|
| 743 |
+
wrongContour1.value = segment1.value
|
| 744 |
+
wrongContour2.value = segment2.value
|
| 745 |
+
|
| 746 |
+
# Assemble
|
| 747 |
+
fixed1 = RecordingPen()
|
| 748 |
+
fixed2 = RecordingPen()
|
| 749 |
+
for contour in perContourPen1.value:
|
| 750 |
+
fixed1.value.extend(contour.value)
|
| 751 |
+
for contour in perContourPen2.value:
|
| 752 |
+
fixed2.value.extend(contour.value)
|
| 753 |
+
fixed1.draw = fixed1.replay
|
| 754 |
+
fixed2.draw = fixed2.replay
|
| 755 |
+
|
| 756 |
+
overriding1[glyphname] = fixed1
|
| 757 |
+
overriding2[glyphname] = fixed2
|
| 758 |
+
|
| 759 |
+
try:
|
| 760 |
+
midway_glyphset = LerpGlyphSet(overriding1, overriding2)
|
| 761 |
+
self.draw_glyph(
|
| 762 |
+
midway_glyphset,
|
| 763 |
+
glyphname,
|
| 764 |
+
{"type": "fixed"},
|
| 765 |
+
None,
|
| 766 |
+
x=x,
|
| 767 |
+
y=y,
|
| 768 |
+
scale=min(scales),
|
| 769 |
+
)
|
| 770 |
+
except ValueError:
|
| 771 |
+
self.draw_emoticon(self.shrug, x=x, y=y)
|
| 772 |
+
y += self.panel_height + self.pad
|
| 773 |
+
|
| 774 |
+
else:
|
| 775 |
+
emoticon = self.shrug
|
| 776 |
+
if InterpolatableProblem.UNDERWEIGHT in problem_types:
|
| 777 |
+
emoticon = self.underweight
|
| 778 |
+
elif InterpolatableProblem.OVERWEIGHT in problem_types:
|
| 779 |
+
emoticon = self.overweight
|
| 780 |
+
elif InterpolatableProblem.NOTHING in problem_types:
|
| 781 |
+
emoticon = self.yay
|
| 782 |
+
self.draw_emoticon(emoticon, x=x, y=y)
|
| 783 |
+
|
| 784 |
+
if show_page_number:
|
| 785 |
+
self.draw_label(
|
| 786 |
+
str(self.page_number),
|
| 787 |
+
x=0,
|
| 788 |
+
y=self.height - self.font_size - self.pad,
|
| 789 |
+
width=self.width,
|
| 790 |
+
color=self.head_color,
|
| 791 |
+
align=0.5,
|
| 792 |
+
)
|
| 793 |
+
|
| 794 |
+
def draw_label(
|
| 795 |
+
self,
|
| 796 |
+
label,
|
| 797 |
+
*,
|
| 798 |
+
x=0,
|
| 799 |
+
y=0,
|
| 800 |
+
color=(0, 0, 0),
|
| 801 |
+
align=0,
|
| 802 |
+
bold=False,
|
| 803 |
+
width=None,
|
| 804 |
+
height=None,
|
| 805 |
+
font_size=None,
|
| 806 |
+
):
|
| 807 |
+
if width is None:
|
| 808 |
+
width = self.width
|
| 809 |
+
if height is None:
|
| 810 |
+
height = self.height
|
| 811 |
+
if font_size is None:
|
| 812 |
+
font_size = self.font_size
|
| 813 |
+
cr = cairo.Context(self.surface)
|
| 814 |
+
cr.select_font_face(
|
| 815 |
+
"@cairo:",
|
| 816 |
+
cairo.FONT_SLANT_NORMAL,
|
| 817 |
+
cairo.FONT_WEIGHT_BOLD if bold else cairo.FONT_WEIGHT_NORMAL,
|
| 818 |
+
)
|
| 819 |
+
cr.set_font_size(font_size)
|
| 820 |
+
font_extents = cr.font_extents()
|
| 821 |
+
font_size = font_size * font_size / font_extents[2]
|
| 822 |
+
cr.set_font_size(font_size)
|
| 823 |
+
font_extents = cr.font_extents()
|
| 824 |
+
|
| 825 |
+
cr.set_source_rgb(*color)
|
| 826 |
+
|
| 827 |
+
extents = cr.text_extents(label)
|
| 828 |
+
if extents.width > width:
|
| 829 |
+
# Shrink
|
| 830 |
+
font_size *= width / extents.width
|
| 831 |
+
cr.set_font_size(font_size)
|
| 832 |
+
font_extents = cr.font_extents()
|
| 833 |
+
extents = cr.text_extents(label)
|
| 834 |
+
|
| 835 |
+
# Center
|
| 836 |
+
label_x = x + (width - extents.width) * align
|
| 837 |
+
label_y = y + font_extents[0]
|
| 838 |
+
cr.move_to(label_x, label_y)
|
| 839 |
+
cr.show_text(label)
|
| 840 |
+
|
| 841 |
+
def draw_glyph(self, glyphset, glyphname, problems, which, *, x=0, y=0, scale=None):
|
| 842 |
+
if type(problems) not in (list, tuple):
|
| 843 |
+
problems = [problems]
|
| 844 |
+
|
| 845 |
+
midway = any(problem["type"] == "midway" for problem in problems)
|
| 846 |
+
problem_type = problems[0]["type"]
|
| 847 |
+
problem_types = set(problem["type"] for problem in problems)
|
| 848 |
+
if not all(pt == problem_type for pt in problem_types):
|
| 849 |
+
problem_type = "mixed"
|
| 850 |
+
glyph = glyphset[glyphname]
|
| 851 |
+
|
| 852 |
+
recording = RecordingPen()
|
| 853 |
+
glyph.draw(recording)
|
| 854 |
+
decomposedRecording = DecomposingRecordingPen(glyphset)
|
| 855 |
+
glyph.draw(decomposedRecording)
|
| 856 |
+
|
| 857 |
+
boundsPen = ControlBoundsPen(glyphset)
|
| 858 |
+
decomposedRecording.replay(boundsPen)
|
| 859 |
+
bounds = boundsPen.bounds
|
| 860 |
+
if bounds is None:
|
| 861 |
+
bounds = (0, 0, 0, 0)
|
| 862 |
+
|
| 863 |
+
glyph_width = bounds[2] - bounds[0]
|
| 864 |
+
glyph_height = bounds[3] - bounds[1]
|
| 865 |
+
|
| 866 |
+
if glyph_width:
|
| 867 |
+
if scale is None:
|
| 868 |
+
scale = self.panel_width / glyph_width
|
| 869 |
+
else:
|
| 870 |
+
scale = min(scale, self.panel_height / glyph_height)
|
| 871 |
+
if glyph_height:
|
| 872 |
+
if scale is None:
|
| 873 |
+
scale = self.panel_height / glyph_height
|
| 874 |
+
else:
|
| 875 |
+
scale = min(scale, self.panel_height / glyph_height)
|
| 876 |
+
if scale is None:
|
| 877 |
+
scale = 1
|
| 878 |
+
|
| 879 |
+
cr = cairo.Context(self.surface)
|
| 880 |
+
cr.translate(x, y)
|
| 881 |
+
# Center
|
| 882 |
+
cr.translate(
|
| 883 |
+
(self.panel_width - glyph_width * scale) / 2,
|
| 884 |
+
(self.panel_height - glyph_height * scale) / 2,
|
| 885 |
+
)
|
| 886 |
+
cr.scale(scale, -scale)
|
| 887 |
+
cr.translate(-bounds[0], -bounds[3])
|
| 888 |
+
|
| 889 |
+
if self.border_color:
|
| 890 |
+
cr.set_source_rgb(*self.border_color)
|
| 891 |
+
cr.rectangle(bounds[0], bounds[1], glyph_width, glyph_height)
|
| 892 |
+
cr.set_line_width(self.border_width / scale)
|
| 893 |
+
cr.stroke()
|
| 894 |
+
|
| 895 |
+
if self.fill_color or self.stroke_color:
|
| 896 |
+
pen = CairoPen(glyphset, cr)
|
| 897 |
+
decomposedRecording.replay(pen)
|
| 898 |
+
|
| 899 |
+
if self.fill_color and problem_type != InterpolatableProblem.OPEN_PATH:
|
| 900 |
+
cr.set_source_rgb(*self.fill_color)
|
| 901 |
+
cr.fill_preserve()
|
| 902 |
+
|
| 903 |
+
if self.stroke_color:
|
| 904 |
+
cr.set_source_rgb(*self.stroke_color)
|
| 905 |
+
cr.set_line_width(self.stroke_width / scale)
|
| 906 |
+
cr.stroke_preserve()
|
| 907 |
+
|
| 908 |
+
cr.new_path()
|
| 909 |
+
|
| 910 |
+
if (
|
| 911 |
+
InterpolatableProblem.UNDERWEIGHT in problem_types
|
| 912 |
+
or InterpolatableProblem.OVERWEIGHT in problem_types
|
| 913 |
+
):
|
| 914 |
+
perContourPen = PerContourOrComponentPen(RecordingPen, glyphset=glyphset)
|
| 915 |
+
recording.replay(perContourPen)
|
| 916 |
+
for problem in problems:
|
| 917 |
+
if problem["type"] in (
|
| 918 |
+
InterpolatableProblem.UNDERWEIGHT,
|
| 919 |
+
InterpolatableProblem.OVERWEIGHT,
|
| 920 |
+
):
|
| 921 |
+
contour = perContourPen.value[problem["contour"]]
|
| 922 |
+
contour.replay(CairoPen(glyphset, cr))
|
| 923 |
+
cr.set_source_rgba(*self.weight_issue_contour_color)
|
| 924 |
+
cr.fill()
|
| 925 |
+
|
| 926 |
+
if any(
|
| 927 |
+
t in problem_types
|
| 928 |
+
for t in {
|
| 929 |
+
InterpolatableProblem.NOTHING,
|
| 930 |
+
InterpolatableProblem.NODE_COUNT,
|
| 931 |
+
InterpolatableProblem.NODE_INCOMPATIBILITY,
|
| 932 |
+
}
|
| 933 |
+
):
|
| 934 |
+
cr.set_line_cap(cairo.LINE_CAP_ROUND)
|
| 935 |
+
|
| 936 |
+
# Oncurve nodes
|
| 937 |
+
for segment, args in decomposedRecording.value:
|
| 938 |
+
if not args:
|
| 939 |
+
continue
|
| 940 |
+
x, y = args[-1]
|
| 941 |
+
cr.move_to(x, y)
|
| 942 |
+
cr.line_to(x, y)
|
| 943 |
+
cr.set_source_rgba(*self.oncurve_node_color)
|
| 944 |
+
cr.set_line_width(self.oncurve_node_diameter / scale)
|
| 945 |
+
cr.stroke()
|
| 946 |
+
|
| 947 |
+
# Offcurve nodes
|
| 948 |
+
for segment, args in decomposedRecording.value:
|
| 949 |
+
if not args:
|
| 950 |
+
continue
|
| 951 |
+
for x, y in args[:-1]:
|
| 952 |
+
cr.move_to(x, y)
|
| 953 |
+
cr.line_to(x, y)
|
| 954 |
+
cr.set_source_rgba(*self.offcurve_node_color)
|
| 955 |
+
cr.set_line_width(self.offcurve_node_diameter / scale)
|
| 956 |
+
cr.stroke()
|
| 957 |
+
|
| 958 |
+
# Handles
|
| 959 |
+
for segment, args in decomposedRecording.value:
|
| 960 |
+
if not args:
|
| 961 |
+
pass
|
| 962 |
+
elif segment in ("moveTo", "lineTo"):
|
| 963 |
+
cr.move_to(*args[0])
|
| 964 |
+
elif segment == "qCurveTo":
|
| 965 |
+
for x, y in args:
|
| 966 |
+
cr.line_to(x, y)
|
| 967 |
+
cr.new_sub_path()
|
| 968 |
+
cr.move_to(*args[-1])
|
| 969 |
+
elif segment == "curveTo":
|
| 970 |
+
cr.line_to(*args[0])
|
| 971 |
+
cr.new_sub_path()
|
| 972 |
+
cr.move_to(*args[1])
|
| 973 |
+
cr.line_to(*args[2])
|
| 974 |
+
cr.new_sub_path()
|
| 975 |
+
cr.move_to(*args[-1])
|
| 976 |
+
else:
|
| 977 |
+
continue
|
| 978 |
+
|
| 979 |
+
cr.set_source_rgba(*self.handle_color)
|
| 980 |
+
cr.set_line_width(self.handle_width / scale)
|
| 981 |
+
cr.stroke()
|
| 982 |
+
|
| 983 |
+
matching = None
|
| 984 |
+
for problem in problems:
|
| 985 |
+
if problem["type"] == InterpolatableProblem.CONTOUR_ORDER:
|
| 986 |
+
matching = problem["value_2"]
|
| 987 |
+
colors = cycle(self.contour_colors)
|
| 988 |
+
perContourPen = PerContourOrComponentPen(
|
| 989 |
+
RecordingPen, glyphset=glyphset
|
| 990 |
+
)
|
| 991 |
+
recording.replay(perContourPen)
|
| 992 |
+
for i, contour in enumerate(perContourPen.value):
|
| 993 |
+
if matching[i] == i:
|
| 994 |
+
continue
|
| 995 |
+
color = next(colors)
|
| 996 |
+
contour.replay(CairoPen(glyphset, cr))
|
| 997 |
+
cr.set_source_rgba(*color, self.contour_alpha)
|
| 998 |
+
cr.fill()
|
| 999 |
+
|
| 1000 |
+
for problem in problems:
|
| 1001 |
+
if problem["type"] in (
|
| 1002 |
+
InterpolatableProblem.NOTHING,
|
| 1003 |
+
InterpolatableProblem.WRONG_START_POINT,
|
| 1004 |
+
):
|
| 1005 |
+
idx = problem.get("contour")
|
| 1006 |
+
|
| 1007 |
+
# Draw suggested point
|
| 1008 |
+
if idx is not None and which == 1 and "value_2" in problem:
|
| 1009 |
+
perContourPen = PerContourOrComponentPen(
|
| 1010 |
+
RecordingPen, glyphset=glyphset
|
| 1011 |
+
)
|
| 1012 |
+
decomposedRecording.replay(perContourPen)
|
| 1013 |
+
points = SimpleRecordingPointPen()
|
| 1014 |
+
converter = SegmentToPointPen(points, False)
|
| 1015 |
+
perContourPen.value[
|
| 1016 |
+
idx if matching is None else matching[idx]
|
| 1017 |
+
].replay(converter)
|
| 1018 |
+
targetPoint = points.value[problem["value_2"]][0]
|
| 1019 |
+
cr.save()
|
| 1020 |
+
cr.translate(*targetPoint)
|
| 1021 |
+
cr.scale(1 / scale, 1 / scale)
|
| 1022 |
+
self.draw_dot(
|
| 1023 |
+
cr,
|
| 1024 |
+
diameter=self.corrected_start_point_size,
|
| 1025 |
+
color=self.corrected_start_point_color,
|
| 1026 |
+
)
|
| 1027 |
+
cr.restore()
|
| 1028 |
+
|
| 1029 |
+
# Draw start-point arrow
|
| 1030 |
+
if which == 0 or not problem.get("reversed"):
|
| 1031 |
+
color = self.start_point_color
|
| 1032 |
+
else:
|
| 1033 |
+
color = self.wrong_start_point_color
|
| 1034 |
+
first_pt = None
|
| 1035 |
+
i = 0
|
| 1036 |
+
cr.save()
|
| 1037 |
+
for segment, args in decomposedRecording.value:
|
| 1038 |
+
if segment == "moveTo":
|
| 1039 |
+
first_pt = args[0]
|
| 1040 |
+
continue
|
| 1041 |
+
if first_pt is None:
|
| 1042 |
+
continue
|
| 1043 |
+
if segment == "closePath":
|
| 1044 |
+
second_pt = first_pt
|
| 1045 |
+
else:
|
| 1046 |
+
second_pt = args[0]
|
| 1047 |
+
|
| 1048 |
+
if idx is None or i == idx:
|
| 1049 |
+
cr.save()
|
| 1050 |
+
first_pt = complex(*first_pt)
|
| 1051 |
+
second_pt = complex(*second_pt)
|
| 1052 |
+
length = abs(second_pt - first_pt)
|
| 1053 |
+
cr.translate(first_pt.real, first_pt.imag)
|
| 1054 |
+
if length:
|
| 1055 |
+
# Draw arrowhead
|
| 1056 |
+
cr.rotate(
|
| 1057 |
+
math.atan2(
|
| 1058 |
+
second_pt.imag - first_pt.imag,
|
| 1059 |
+
second_pt.real - first_pt.real,
|
| 1060 |
+
)
|
| 1061 |
+
)
|
| 1062 |
+
cr.scale(1 / scale, 1 / scale)
|
| 1063 |
+
self.draw_arrow(cr, color=color)
|
| 1064 |
+
else:
|
| 1065 |
+
# Draw circle
|
| 1066 |
+
cr.scale(1 / scale, 1 / scale)
|
| 1067 |
+
self.draw_dot(
|
| 1068 |
+
cr,
|
| 1069 |
+
diameter=self.corrected_start_point_size,
|
| 1070 |
+
color=color,
|
| 1071 |
+
)
|
| 1072 |
+
cr.restore()
|
| 1073 |
+
|
| 1074 |
+
if idx is not None:
|
| 1075 |
+
break
|
| 1076 |
+
|
| 1077 |
+
first_pt = None
|
| 1078 |
+
i += 1
|
| 1079 |
+
|
| 1080 |
+
cr.restore()
|
| 1081 |
+
|
| 1082 |
+
if problem["type"] == InterpolatableProblem.KINK:
|
| 1083 |
+
idx = problem.get("contour")
|
| 1084 |
+
perContourPen = PerContourOrComponentPen(
|
| 1085 |
+
RecordingPen, glyphset=glyphset
|
| 1086 |
+
)
|
| 1087 |
+
decomposedRecording.replay(perContourPen)
|
| 1088 |
+
points = SimpleRecordingPointPen()
|
| 1089 |
+
converter = SegmentToPointPen(points, False)
|
| 1090 |
+
perContourPen.value[idx if matching is None else matching[idx]].replay(
|
| 1091 |
+
converter
|
| 1092 |
+
)
|
| 1093 |
+
|
| 1094 |
+
targetPoint = points.value[problem["value"]][0]
|
| 1095 |
+
cr.save()
|
| 1096 |
+
cr.translate(*targetPoint)
|
| 1097 |
+
cr.scale(1 / scale, 1 / scale)
|
| 1098 |
+
if midway:
|
| 1099 |
+
self.draw_circle(
|
| 1100 |
+
cr,
|
| 1101 |
+
diameter=self.kink_circle_size,
|
| 1102 |
+
stroke_width=self.kink_circle_stroke_width,
|
| 1103 |
+
color=self.kink_circle_color,
|
| 1104 |
+
)
|
| 1105 |
+
else:
|
| 1106 |
+
self.draw_dot(
|
| 1107 |
+
cr,
|
| 1108 |
+
diameter=self.kink_point_size,
|
| 1109 |
+
color=self.kink_point_color,
|
| 1110 |
+
)
|
| 1111 |
+
cr.restore()
|
| 1112 |
+
|
| 1113 |
+
return scale
|
| 1114 |
+
|
| 1115 |
+
def draw_dot(self, cr, *, x=0, y=0, color=(0, 0, 0), diameter=10):
|
| 1116 |
+
cr.save()
|
| 1117 |
+
cr.set_line_width(diameter)
|
| 1118 |
+
cr.set_line_cap(cairo.LINE_CAP_ROUND)
|
| 1119 |
+
cr.move_to(x, y)
|
| 1120 |
+
cr.line_to(x, y)
|
| 1121 |
+
if len(color) == 3:
|
| 1122 |
+
color = color + (1,)
|
| 1123 |
+
cr.set_source_rgba(*color)
|
| 1124 |
+
cr.stroke()
|
| 1125 |
+
cr.restore()
|
| 1126 |
+
|
| 1127 |
+
def draw_circle(
|
| 1128 |
+
self, cr, *, x=0, y=0, color=(0, 0, 0), diameter=10, stroke_width=1
|
| 1129 |
+
):
|
| 1130 |
+
cr.save()
|
| 1131 |
+
cr.set_line_width(stroke_width)
|
| 1132 |
+
cr.set_line_cap(cairo.LINE_CAP_SQUARE)
|
| 1133 |
+
cr.arc(x, y, diameter / 2, 0, 2 * math.pi)
|
| 1134 |
+
if len(color) == 3:
|
| 1135 |
+
color = color + (1,)
|
| 1136 |
+
cr.set_source_rgba(*color)
|
| 1137 |
+
cr.stroke()
|
| 1138 |
+
cr.restore()
|
| 1139 |
+
|
| 1140 |
+
def draw_arrow(self, cr, *, x=0, y=0, color=(0, 0, 0)):
|
| 1141 |
+
cr.save()
|
| 1142 |
+
if len(color) == 3:
|
| 1143 |
+
color = color + (1,)
|
| 1144 |
+
cr.set_source_rgba(*color)
|
| 1145 |
+
cr.translate(self.start_arrow_length + x, y)
|
| 1146 |
+
cr.move_to(0, 0)
|
| 1147 |
+
cr.line_to(
|
| 1148 |
+
-self.start_arrow_length,
|
| 1149 |
+
-self.start_arrow_length * 0.4,
|
| 1150 |
+
)
|
| 1151 |
+
cr.line_to(
|
| 1152 |
+
-self.start_arrow_length,
|
| 1153 |
+
self.start_arrow_length * 0.4,
|
| 1154 |
+
)
|
| 1155 |
+
cr.close_path()
|
| 1156 |
+
cr.fill()
|
| 1157 |
+
cr.restore()
|
| 1158 |
+
|
| 1159 |
+
def draw_text(self, text, *, x=0, y=0, color=(0, 0, 0), width=None, height=None):
|
| 1160 |
+
if width is None:
|
| 1161 |
+
width = self.width
|
| 1162 |
+
if height is None:
|
| 1163 |
+
height = self.height
|
| 1164 |
+
|
| 1165 |
+
text = text.splitlines()
|
| 1166 |
+
cr = cairo.Context(self.surface)
|
| 1167 |
+
cr.set_source_rgb(*color)
|
| 1168 |
+
cr.set_font_size(self.font_size)
|
| 1169 |
+
cr.select_font_face(
|
| 1170 |
+
"@cairo:monospace", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL
|
| 1171 |
+
)
|
| 1172 |
+
text_width = 0
|
| 1173 |
+
text_height = 0
|
| 1174 |
+
font_extents = cr.font_extents()
|
| 1175 |
+
font_font_size = font_extents[2]
|
| 1176 |
+
font_ascent = font_extents[0]
|
| 1177 |
+
for line in text:
|
| 1178 |
+
extents = cr.text_extents(line)
|
| 1179 |
+
text_width = max(text_width, extents.x_advance)
|
| 1180 |
+
text_height += font_font_size
|
| 1181 |
+
if not text_width:
|
| 1182 |
+
return
|
| 1183 |
+
cr.translate(x, y)
|
| 1184 |
+
scale = min(width / text_width, height / text_height)
|
| 1185 |
+
# center
|
| 1186 |
+
cr.translate(
|
| 1187 |
+
(width - text_width * scale) / 2, (height - text_height * scale) / 2
|
| 1188 |
+
)
|
| 1189 |
+
cr.scale(scale, scale)
|
| 1190 |
+
|
| 1191 |
+
cr.translate(0, font_ascent)
|
| 1192 |
+
for line in text:
|
| 1193 |
+
cr.move_to(0, 0)
|
| 1194 |
+
cr.show_text(line)
|
| 1195 |
+
cr.translate(0, font_font_size)
|
| 1196 |
+
|
| 1197 |
+
def draw_cupcake(self):
|
| 1198 |
+
self.draw_label(
|
| 1199 |
+
self.no_issues_label,
|
| 1200 |
+
x=self.pad,
|
| 1201 |
+
y=self.pad,
|
| 1202 |
+
color=self.no_issues_label_color,
|
| 1203 |
+
width=self.width - 2 * self.pad,
|
| 1204 |
+
align=0.5,
|
| 1205 |
+
bold=True,
|
| 1206 |
+
font_size=self.title_font_size,
|
| 1207 |
+
)
|
| 1208 |
+
|
| 1209 |
+
self.draw_text(
|
| 1210 |
+
self.cupcake,
|
| 1211 |
+
x=self.pad,
|
| 1212 |
+
y=self.pad + self.font_size,
|
| 1213 |
+
width=self.width - 2 * self.pad,
|
| 1214 |
+
height=self.height - 2 * self.pad - self.font_size,
|
| 1215 |
+
color=self.cupcake_color,
|
| 1216 |
+
)
|
| 1217 |
+
|
| 1218 |
+
def draw_emoticon(self, emoticon, x=0, y=0):
|
| 1219 |
+
self.draw_text(
|
| 1220 |
+
emoticon,
|
| 1221 |
+
x=x,
|
| 1222 |
+
y=y,
|
| 1223 |
+
color=self.emoticon_color,
|
| 1224 |
+
width=self.panel_width,
|
| 1225 |
+
height=self.panel_height,
|
| 1226 |
+
)
|
| 1227 |
+
|
| 1228 |
+
|
| 1229 |
+
class InterpolatablePostscriptLike(InterpolatablePlot):
|
| 1230 |
+
def __exit__(self, type, value, traceback):
|
| 1231 |
+
self.surface.finish()
|
| 1232 |
+
|
| 1233 |
+
def show_page(self):
|
| 1234 |
+
super().show_page()
|
| 1235 |
+
self.surface.show_page()
|
| 1236 |
+
|
| 1237 |
+
|
| 1238 |
+
class InterpolatablePS(InterpolatablePostscriptLike):
|
| 1239 |
+
def __enter__(self):
|
| 1240 |
+
self.surface = cairo.PSSurface(self.out, self.width, self.height)
|
| 1241 |
+
return self
|
| 1242 |
+
|
| 1243 |
+
|
| 1244 |
+
class InterpolatablePDF(InterpolatablePostscriptLike):
|
| 1245 |
+
def __enter__(self):
|
| 1246 |
+
self.surface = cairo.PDFSurface(self.out, self.width, self.height)
|
| 1247 |
+
self.surface.set_metadata(
|
| 1248 |
+
cairo.PDF_METADATA_CREATOR, "fonttools varLib.interpolatable"
|
| 1249 |
+
)
|
| 1250 |
+
self.surface.set_metadata(cairo.PDF_METADATA_CREATE_DATE, "")
|
| 1251 |
+
return self
|
| 1252 |
+
|
| 1253 |
+
|
| 1254 |
+
class InterpolatableSVG(InterpolatablePlot):
|
| 1255 |
+
def __enter__(self):
|
| 1256 |
+
self.sink = BytesIO()
|
| 1257 |
+
self.surface = cairo.SVGSurface(self.sink, self.width, self.height)
|
| 1258 |
+
return self
|
| 1259 |
+
|
| 1260 |
+
def __exit__(self, type, value, traceback):
|
| 1261 |
+
if self.surface is not None:
|
| 1262 |
+
self.show_page()
|
| 1263 |
+
|
| 1264 |
+
def show_page(self):
|
| 1265 |
+
super().show_page()
|
| 1266 |
+
self.surface.finish()
|
| 1267 |
+
self.out.append(self.sink.getvalue())
|
| 1268 |
+
self.sink = BytesIO()
|
| 1269 |
+
self.surface = cairo.SVGSurface(self.sink, self.width, self.height)
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/plot.py
ADDED
|
@@ -0,0 +1,238 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Visualize DesignSpaceDocument and resulting VariationModel."""
|
| 2 |
+
|
| 3 |
+
from fontTools.varLib.models import VariationModel, supportScalar
|
| 4 |
+
from fontTools.designspaceLib import DesignSpaceDocument
|
| 5 |
+
from matplotlib import pyplot
|
| 6 |
+
from mpl_toolkits.mplot3d import axes3d
|
| 7 |
+
from itertools import cycle
|
| 8 |
+
import math
|
| 9 |
+
import logging
|
| 10 |
+
import sys
|
| 11 |
+
|
| 12 |
+
log = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def stops(support, count=10):
    """Return sample points across a (lo, peak, hi) support triple.

    Yields ``count`` evenly spaced values from lo up to (excluding) peak,
    ``count`` from peak up to (excluding) hi, plus hi itself — a total of
    ``2 * count + 1`` values.
    """
    lo, peak, hi = support
    samples = []
    for i in range(count):
        samples.append(lo + (peak - lo) * i / count)
    for i in range(count):
        samples.append(peak + (hi - peak) * i / count)
    samples.append(hi)
    return samples
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def _plotLocationsDots(locations, axes, subplot, **kwargs):
    """Mark each master location with a dot at height 1.0 on *subplot*.

    Supports one axis (2D plot) or two axes (3D plot); any other axis
    count is a programming error.
    """
    dims = len(axes)
    palette = cycle(pyplot.cm.Set1.colors)
    for location, dot_color in zip(locations, palette):
        if dims == 1:
            xs = [location.get(axes[0], 0)]
            subplot.plot(xs, [1.0], "o", color=dot_color, **kwargs)
        elif dims == 2:
            xs = [location.get(axes[0], 0)]
            ys = [location.get(axes[1], 0)]
            subplot.plot(xs, ys, [1.0], "o", color=dot_color, **kwargs)
        else:
            raise AssertionError(dims)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def plotLocations(locations, fig, names=None, **kwargs):
    """Plot the variation-model supports for *locations* into figure *fig*.

    *names*, if given, must parallel *locations*; they are reordered to
    match the model's internal master ordering. Only designspaces with
    one or two axes can be visualized.
    """
    count = len(locations)
    cols = math.ceil(count**0.5)
    rows = math.ceil(count / cols)

    if names is None:
        names = [None] * count

    model = VariationModel(locations)
    # Re-order the labels to follow the model's internal master order.
    names = [names[model.reverseMapping[i]] for i in range(len(names))]

    axis_tags = sorted(locations[0].keys())
    if len(axis_tags) == 1:
        _plotLocations2D(model, axis_tags[0], fig, cols, rows, names=names, **kwargs)
    elif len(axis_tags) == 2:
        _plotLocations3D(model, axis_tags, fig, cols, rows, names=names, **kwargs)
    else:
        raise ValueError("Only 1 or 2 axes are supported")
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def _plotLocations2D(model, axis, fig, cols, rows, names, **kwargs):
    """Draw each support's scalar curve along a single *axis* (2D)."""
    subplot = fig.add_subplot(111)
    series = zip(model.supports, cycle(pyplot.cm.Set1.colors), cycle(names))
    for support, color, name in series:
        if name is not None:
            subplot.set_title(name)
        subplot.set_xlabel(axis)
        pyplot.xlim(-1.0, +1.0)

        # Missing axis in the support means "active everywhere".
        region = support.get(axis, (-1.0, 0.0, +1.0))
        xs = stops(region)
        ys = [supportScalar({axis: x}, support) for x in xs]
        subplot.plot(xs, ys, color=color, **kwargs)

    _plotLocationsDots(model.locations, [axis], subplot)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def _plotLocations3D(model, axes, fig, rows, cols, names, **kwargs):
    """Draw each support's scalar surface over two axes as a 3D wireframe."""
    ax1, ax2 = axes

    axis3D = fig.add_subplot(111, projection="3d")
    series = zip(model.supports, cycle(pyplot.cm.Set1.colors), cycle(names))
    for support, color, name in series:
        if name is not None:
            axis3D.set_title(name)
        axis3D.set_xlabel(ax1)
        axis3D.set_ylabel(ax2)
        pyplot.xlim(-1.0, +1.0)
        pyplot.ylim(-1.0, +1.0)

        # Missing axis in the support means "active everywhere".
        Xs = support.get(ax1, (-1.0, 0.0, +1.0))
        Ys = support.get(ax2, (-1.0, 0.0, +1.0))
        # Wires of constant x: x sampled densely, y only at the triple's knots.
        for x in stops(Xs):
            zs = [supportScalar({ax1: x, ax2: y}, support) for y in Ys]
            axis3D.plot([x] * len(Ys), list(Ys), zs, color=color, **kwargs)
        # Wires of constant y: y sampled densely, x only at the knots.
        for y in stops(Ys):
            zs = [supportScalar({ax1: x, ax2: y}, support) for x in Xs]
            axis3D.plot(list(Xs), [y] * len(Xs), zs, color=color, **kwargs)

    _plotLocationsDots(model.locations, [ax1, ax2], axis3D)
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def plotDocument(doc, fig, **kwargs):
    """Plot a DesignSpaceDocument's master locations into figure *fig*.

    Note: normalizes *doc* in place before plotting.
    """
    doc.normalize()
    locations = []
    names = []
    for source in doc.sources:
        locations.append(source.location)
        names.append(source.name)
    plotLocations(locations, fig, names, **kwargs)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def _plotModelFromMasters2D(model, masterValues, fig, **kwargs):
    """Plot interpolated values along the model's single axis as a 2D curve."""
    assert len(model.axisOrder) == 1
    axis = model.axisOrder[0]

    coords = [loc.get(axis, 0) for loc in model.locations]
    axis_min = min(coords)
    axis_max = max(coords)

    import numpy as np

    # 100 samples across the axis range (endpoint excluded by arange).
    xs = np.arange(axis_min, axis_max, (axis_max - axis_min) / 100)
    ys = [model.interpolateFromMasters({axis: x}, masterValues) for x in xs]

    subplot = fig.add_subplot(111)
    subplot.plot(xs, ys, "-", **kwargs)
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
def _plotModelFromMasters3D(model, masterValues, fig, **kwargs):
    """Plot interpolated values over the model's two axes as a 3D surface."""
    assert len(model.axisOrder) == 2
    axis1, axis2 = model.axisOrder[0], model.axisOrder[1]

    coords1 = [loc.get(axis1, 0) for loc in model.locations]
    coords2 = [loc.get(axis2, 0) for loc in model.locations]

    import numpy as np

    # 100x100 sample grid across each axis range (endpoints excluded).
    X = np.arange(min(coords1), max(coords1), (max(coords1) - min(coords1)) / 100)
    Y = np.arange(min(coords2), max(coords2), (max(coords2) - min(coords2)) / 100)
    X, Y = np.meshgrid(X, Y)

    Z = np.array(
        [
            [
                model.interpolateFromMasters({axis1: x, axis2: y}, masterValues)
                for x, y in zip(row_x, row_y)
            ]
            for row_x, row_y in zip(X, Y)
        ]
    )

    axis3D = fig.add_subplot(111, projection="3d")
    axis3D.plot_surface(X, Y, Z, **kwargs)
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def plotModelFromMasters(model, masterValues, fig, **kwargs):
    """Plot a variation model together with its master values into *fig*.

    *masterValues* must parallel the model's locations. The model's
    ``axisOrder`` must have exactly one or two axes; anything else
    raises ValueError.
    """
    dimensions = len(model.axisOrder)
    if dimensions == 1:
        _plotModelFromMasters2D(model, masterValues, fig, **kwargs)
    elif dimensions == 2:
        _plotModelFromMasters3D(model, masterValues, fig, **kwargs)
    else:
        raise ValueError("Only 1 or 2 axes are supported")
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
def main(args=None):
    """Command-line entry point for ``fonttools varLib.plot``.

    Accepts one of three argument forms:
      * a single ``.designspace`` file path,
      * one or more comma-separated axis locations, or
      * one or more ``location=value`` master-value pairs.

    Exits with status 1 (after printing usage to stderr) when no
    arguments are given.
    """
    from fontTools import configLogger

    if args is None:
        args = sys.argv[1:]

    # configure the library logger (for >= WARNING)
    configLogger()
    # comment this out to enable debug messages from logger
    # log.setLevel(logging.DEBUG)

    if len(args) < 1:
        # Fix: all usage text goes to stderr — the bare " or" separator
        # lines previously leaked to stdout while their neighbors used
        # stderr, splitting the usage message across streams.
        print("usage: fonttools varLib.plot source.designspace", file=sys.stderr)
        print(" or", file=sys.stderr)
        print("usage: fonttools varLib.plot location1 location2 ...", file=sys.stderr)
        print(" or", file=sys.stderr)
        print(
            "usage: fonttools varLib.plot location1=value1 location2=value2 ...",
            file=sys.stderr,
        )
        sys.exit(1)

    fig = pyplot.figure()
    fig.set_tight_layout(True)

    if len(args) == 1 and args[0].endswith(".designspace"):
        doc = DesignSpaceDocument()
        doc.read(args[0])
        plotDocument(doc, fig)
    else:
        # Synthetic single-letter axis tags: A, B, C, ... for each dimension.
        axes = [chr(c) for c in range(ord("A"), ord("Z") + 1)]
        if "=" not in args[0]:
            # Bare locations: plot the variation-model supports.
            locs = [dict(zip(axes, (float(v) for v in s.split(",")))) for s in args]
            plotLocations(locs, fig)
        else:
            # location=value pairs: plot the interpolated model values.
            locations = []
            masterValues = []
            for arg in args:
                loc, v = arg.split("=")
                locations.append(dict(zip(axes, (float(v) for v in loc.split(",")))))
                masterValues.append(float(v))
            model = VariationModel(locations, axes[: len(locations[0])])
            plotModelFromMasters(model, masterValues, fig)

    pyplot.show()
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
if __name__ == "__main__":
    # `sys` is already imported at module level; the redundant local
    # re-import has been dropped.
    sys.exit(main())
|
evalkit_tf437/lib/python3.10/site-packages/google_crc32c/__config__.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2018 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# https://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
import os
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def modify_path():
    """Register google_crc32c's bundled DLL directory with the loader.

    No-op on non-Windows platforms, when PATH is unset, or when
    ``importlib.resources.files`` (or the package data) is unavailable.
    """
    # DLL search-path tweaks only make sense on Windows.
    if os.name != "nt":
        return

    # Bail out when PATH is not set at all.
    if os.environ.get("PATH") is None:
        return

    # The whole lookup stays inside the try so any ImportError raised
    # while resolving package resources is swallowed, matching the
    # original best-effort behavior.
    try:
        from importlib.resources import files as _resources_files

        dll_dir = str(_resources_files("google_crc32c") / "extra-dll")
        if os.path.isdir(dll_dir):
            # Python 3.8+ resolves extension-module DLLs via add_dll_directory.
            os.add_dll_directory(dll_dir)
    except ImportError:
        pass


modify_path()
|
evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (2.89 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/console.cpython-310.pyc
ADDED
|
Binary file (1.87 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/filter.cpython-310.pyc
ADDED
|
Binary file (2.63 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/plugin.cpython-310.pyc
ADDED
|
Binary file (1.92 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/regexopt.cpython-310.pyc
ADDED
|
Binary file (2.93 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/style.cpython-310.pyc
ADDED
|
Binary file (4.58 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/token.cpython-310.pyc
ADDED
|
Binary file (4.67 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/__pycache__/unistring.cpython-310.pyc
ADDED
|
Binary file (31.2 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_asy_builtins.cpython-310.pyc
ADDED
|
Binary file (17.6 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_php_builtins.cpython-310.pyc
ADDED
|
Binary file (68 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_scheme_builtins.cpython-310.pyc
ADDED
|
Binary file (23.1 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_sourcemod_builtins.cpython-310.pyc
ADDED
|
Binary file (20.6 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_stan_builtins.cpython-310.pyc
ADDED
|
Binary file (9.91 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/_usd_builtins.cpython-310.pyc
ADDED
|
Binary file (1.35 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/archetype.cpython-310.pyc
ADDED
|
Binary file (6.7 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/arrow.cpython-310.pyc
ADDED
|
Binary file (2.36 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/asc.cpython-310.pyc
ADDED
|
Binary file (1.75 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/automation.cpython-310.pyc
ADDED
|
Binary file (16.2 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/compiled.cpython-310.pyc
ADDED
|
Binary file (1.93 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/d.cpython-310.pyc
ADDED
|
Binary file (6.19 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/diff.cpython-310.pyc
ADDED
|
Binary file (4 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/elpi.cpython-310.pyc
ADDED
|
Binary file (4.07 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/felix.cpython-310.pyc
ADDED
|
Binary file (5.74 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/inferno.cpython-310.pyc
ADDED
|
Binary file (2.58 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/installers.cpython-310.pyc
ADDED
|
Binary file (9.86 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/jvm.cpython-310.pyc
ADDED
|
Binary file (42.8 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/maxima.cpython-310.pyc
ADDED
|
Binary file (2.46 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/ml.cpython-310.pyc
ADDED
|
Binary file (15.4 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/mojo.cpython-310.pyc
ADDED
|
Binary file (9.37 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/nix.cpython-310.pyc
ADDED
|
Binary file (3.38 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/oberon.cpython-310.pyc
ADDED
|
Binary file (2.97 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/openscad.cpython-310.pyc
ADDED
|
Binary file (2.82 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/perl.cpython-310.pyc
ADDED
|
Binary file (29 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/php.cpython-310.pyc
ADDED
|
Binary file (9.89 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/pony.cpython-310.pyc
ADDED
|
Binary file (2.55 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/prolog.cpython-310.pyc
ADDED
|
Binary file (7.9 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/__pycache__/scdoc.cpython-310.pyc
ADDED
|
Binary file (2.15 kB). View file
|
|
|