diff --git a/.gitattributes b/.gitattributes index d3e4f86d74f1ef1a7f2dc59479c8d6bc01c99822..c08dd4e122cb449d2128ab92f8ee267b00a2fb32 100644 --- a/.gitattributes +++ b/.gitattributes @@ -122,3 +122,5 @@ parrot/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_compute.cpyth parrot/lib/python3.10/site-packages/pyarrow/_orc.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text parrot/lib/libncurses.a filter=lfs diff=lfs merge=lfs -text parrot/lib/python3.10/site-packages/pyarrow/_s3fs.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text +parrot/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text +parrot/lib/python3.10/site-packages/pyarrow/_acero.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text diff --git a/parrot/lib/python3.10/site-packages/_multiprocess/__init__.py b/parrot/lib/python3.10/site-packages/_multiprocess/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..085958182e2246471e6d6f655ff00b6e37e3e99a --- /dev/null +++ b/parrot/lib/python3.10/site-packages/_multiprocess/__init__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. 
The full license text is available at: +# - https://github.com/uqfoundation/multiprocess/blob/master/LICENSE + +from _multiprocessing import * diff --git a/parrot/lib/python3.10/site-packages/_multiprocess/__pycache__/__init__.cpython-310.pyc b/parrot/lib/python3.10/site-packages/_multiprocess/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..534cb5fde9b644da7b88ed244071578082f51868 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/_multiprocess/__pycache__/__init__.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so b/parrot/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..3c52c7807bbf0a7205aab41ca715ad53d60b10d2 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:35769dee5631a7ebc7ee0db2404e68d7df9a298c7521bfc9bbf5ff2bd3aea355 +size 414160 diff --git a/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/LICENSE b/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..7082a2d5b9047bfc09589f387053e24ea490bc54 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2013-2019 Nikolay Kim and Andrew Svetlov + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/RECORD b/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..5a43b6f9e595e688c98935139bb34114dd004428 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/RECORD @@ -0,0 +1,11 @@ +aiosignal-1.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +aiosignal-1.3.1.dist-info/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332 +aiosignal-1.3.1.dist-info/METADATA,sha256=c0HRnlYzfXKztZPTFDlPfygizTherhG5WdwXlvco0Ug,4008 +aiosignal-1.3.1.dist-info/RECORD,, +aiosignal-1.3.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +aiosignal-1.3.1.dist-info/WHEEL,sha256=ZL1lC_LiPDNRgDnOl2taCMc83aPEUZgHHv2h-LDgdiM,92 +aiosignal-1.3.1.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10 +aiosignal/__init__.py,sha256=zQNfFYRSd84bswvpFv8ZWjEr5DeYwV3LXbMSyo2222s,867 +aiosignal/__init__.pyi,sha256=xeCddYSS8fZAkz8S4HuKSR2IDe3N7RW_LKcXDPPA1Xk,311 +aiosignal/__pycache__/__init__.cpython-310.pyc,, +aiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/REQUESTED b/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/WHEEL b/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..5e1f087ca1ac49327ef76b101df80489a03c2e7f --- /dev/null +++ b/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git 
a/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/top_level.txt b/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..ac6df3afe74a5fd43afc7ab7f8393571a495fdc5 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/aiosignal-1.3.1.dist-info/top_level.txt @@ -0,0 +1 @@ +aiosignal diff --git a/parrot/lib/python3.10/site-packages/attr/__pycache__/_compat.cpython-310.pyc b/parrot/lib/python3.10/site-packages/attr/__pycache__/_compat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..feabc1ac949b01717e48779efa9fe5e60fb1dcb1 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/attr/__pycache__/_compat.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/attr/__pycache__/_config.cpython-310.pyc b/parrot/lib/python3.10/site-packages/attr/__pycache__/_config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1ec3899fc928f5e4729f70a530377c253a3451ab Binary files /dev/null and b/parrot/lib/python3.10/site-packages/attr/__pycache__/_config.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/attr/__pycache__/_funcs.cpython-310.pyc b/parrot/lib/python3.10/site-packages/attr/__pycache__/_funcs.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7c37123acbbf3cf8e6c4b828797ded6ca14f9b5 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/attr/__pycache__/_funcs.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/attr/__pycache__/_next_gen.cpython-310.pyc b/parrot/lib/python3.10/site-packages/attr/__pycache__/_next_gen.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a012b0f005a6d7ce3e1cfcb02a2d1532e7e1af23 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/attr/__pycache__/_next_gen.cpython-310.pyc differ diff --git 
a/parrot/lib/python3.10/site-packages/attr/__pycache__/_version_info.cpython-310.pyc b/parrot/lib/python3.10/site-packages/attr/__pycache__/_version_info.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2424ceff82f8ff21d9f8e6d898aea35e69ad32e7 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/attr/__pycache__/_version_info.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/attr/__pycache__/exceptions.cpython-310.pyc b/parrot/lib/python3.10/site-packages/attr/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bbe57458fd06dc4a6ee36b2e4214df14484b146d Binary files /dev/null and b/parrot/lib/python3.10/site-packages/attr/__pycache__/exceptions.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/attr/__pycache__/setters.cpython-310.pyc b/parrot/lib/python3.10/site-packages/attr/__pycache__/setters.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4bdcfcfd7e8005cc2add915aeb8408f8b4674556 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/attr/__pycache__/setters.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/attr/_config.py b/parrot/lib/python3.10/site-packages/attr/_config.py new file mode 100644 index 0000000000000000000000000000000000000000..9c245b1461abd5dc5143f69bc74c75ae50fabdc5 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/attr/_config.py @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: MIT + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + msg = "'run' must be bool." 
+ raise TypeError(msg) + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. + """ + return _run_validators diff --git a/parrot/lib/python3.10/site-packages/attr/_make.py b/parrot/lib/python3.10/site-packages/attr/_make.py new file mode 100644 index 0000000000000000000000000000000000000000..bf00c5f8ceaa5c407fd45a7d1adb5f19571ea0ff --- /dev/null +++ b/parrot/lib/python3.10/site-packages/attr/_make.py @@ -0,0 +1,2960 @@ +# SPDX-License-Identifier: MIT + +from __future__ import annotations + +import abc +import contextlib +import copy +import enum +import functools +import inspect +import itertools +import linecache +import sys +import types +import typing + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import ( + PY_3_8_PLUS, + PY_3_10_PLUS, + PY_3_11_PLUS, + _AnnotationExtractor, + _get_annotations, + get_generic_base, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. +_OBJ_SETATTR = object.__setattr__ +_INIT_FACTORY_PAT = "__attr_factory_%s" +_CLASSVAR_PREFIXES = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_HASH_CACHE_FIELD = "_attrs_cached_hash" + +_EMPTY_METADATA_SINGLETON = types.MappingProxyType({}) + +# Unique object for unequivocal getattr() defaults. 
+_SENTINEL = object() + +_DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(enum.Enum): + """ + Sentinel to indicate the lack of a value when `None` is ambiguous. + + If extending attrs, you can use ``typing.Literal[NOTHING]`` to show + that a value may be ``NOTHING``. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant. + """ + + NOTHING = enum.auto() + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + +NOTHING = _Nothing.NOTHING +""" +Sentinel to indicate the lack of a value when `None` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since `None` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008 + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new field / attribute on a class. + + Identical to `attrs.field`, except it's not keyword-only. + + Consider using `attrs.field` in new code (``attr.ib`` will *never* go away, + though). + + .. warning:: + + Does **nothing** unless the class is also decorated with + `attr.s` (or similar)! + + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. versionchanged:: 17.1.0 + *hash* is `None` and therefore mirrors *eq* by default. + .. 
versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 + *converter* as a replacement for the deprecated *convert* to achieve + consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 22.2.0 *alias* + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + msg = "Invalid value for hash. Must be True, False, or None." + raise TypeError(msg) + + if factory is not None: + if default is not NOTHING: + msg = ( + "The `default` and `factory` arguments are mutually exclusive." + ) + raise ValueError(msg) + if not callable(factory): + msg = "The `factory` argument must be a callable." + raise ValueError(msg) + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. 
+ if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + alias=alias, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + Evaluate the script with the given global (globs) and local (locs) + variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs, locals=None): + """ + Create the method with the script given and return the method object. + """ + locs = {} if locals is None else locals + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + + filename = f"{base_filename[:-1]}-{count}>" + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
+ + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = f"{cls_name}Attributes" + attr_class_template = [ + f"class {attr_class_name}(tuple):", + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + f" {attr_name} = _attrs_property(_attrs_itemgetter({i}))" + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_CLASSVAR_PREFIXES) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + """ + return attrib_name in cls.__dict__ + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. 
+ for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) # noqa: PLW2901 + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) # noqa: PLW2901 + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + If *collect_by_mro* is True, collect them in the correct MRO order, + otherwise use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. 
+ """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = list(these.items()) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + a = attrib() if a is NOTHING else attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). 
Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}" + raise ValueError(msg) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Resolve default field alias after executing field_transformer. + # This allows field_transformer to differentiate between explicit vs + # default aliases and supply their own defaults. + attrs = [ + a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a + for a in attrs + ] + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! + attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +def _make_cached_property_getattr(cached_properties, original_getattr, cls): + lines = [ + # Wrapped to get `__class__` into closure cell for super() + # (It will be replaced with the newly constructed class after construction). 
+ "def wrapper(_cls):", + " __class__ = _cls", + " def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):", + " func = cached_properties.get(item)", + " if func is not None:", + " result = func(self)", + " _setter = _cached_setattr_get(self)", + " _setter(item, result)", + " return result", + ] + if original_getattr is not None: + lines.append( + " return original_getattr(self, item)", + ) + else: + lines.extend( + [ + " try:", + " return super().__getattribute__(item)", + " except AttributeError:", + " if not hasattr(super(), '__getattr__'):", + " raise", + " return super().__getattr__(item)", + " original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"", + " raise AttributeError(original_error)", + ] + ) + + lines.extend( + [ + " return __getattr__", + "__getattr__ = wrapper(_cls)", + ] + ) + + unique_filename = _generate_unique_filename(cls, "getattr") + + glob = { + "cached_properties": cached_properties, + "_cached_setattr_get": _OBJ_SETATTR.__get__, + "original_getattr": original_getattr, + } + + return _make_method( + "__getattr__", + "\n".join(lines), + unique_filename, + glob, + locals={ + "_cls": cls, + }, + ) + + +def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + "__traceback__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder: + """ + Iteratively build *one* class. 
+ """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_pre_init_has_args", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = {a.name for a in base_attrs} + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._pre_init_has_args = False + if self._has_pre_init: + # Check if the pre init method has more arguments than just `self` + # We want to pass arguments if pre init expects arguments + pre_init_func = cls.__attrs_pre_init__ + pre_init_signature = inspect.signature(pre_init_func) + self._pre_init_has_args = len(pre_init_signature.parameters) > 1 + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _DEFAULT_ON_SETATTR, + setters.validate, 
+ setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _DEFAULT_ON_SETATTR + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return f"<_ClassBuilder(cls={self._cls.__name__})>" + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + cls = self._create_slots_class() + else: + cls = self._patch_original_class() + if PY_3_10_PLUS: + cls = abc.update_abstractmethods(cls) + + # The method gets only called if it's not inherited from a base class. + # _has_own_attribute does NOT work properly for classmethods. + if ( + getattr(cls, "__attrs_init_subclass__", None) + and "__attrs_init_subclass__" not in cls.__dict__ + ): + cls.__attrs_init_subclass__() + + return cls + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _SENTINEL) is not _SENTINEL + ): + # An AttributeError can happen if a base class defines a + # class variable and we want to set an attribute with the + # same name by using only a type annotation. 
+ with contextlib.suppress(AttributeError): + delattr(cls, name) + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = _OBJ_SETATTR + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in self._cls_dict.items() + if k not in (*tuple(self._attr_names), "__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = _OBJ_SETATTR + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. 
+ existing_slots = {} + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + if PY_3_8_PLUS: + cached_properties = { + name: cached_property.func + for name, cached_property in cd.items() + if isinstance(cached_property, functools.cached_property) + } + else: + # `functools.cached_property` was introduced in 3.8. + # So can't be used before this. + cached_properties = {} + + # Collect methods with a `__class__` reference that are shadowed in the new class. + # To know to update them. + additional_closure_functions_to_update = [] + if cached_properties: + class_annotations = _get_annotations(self._cls) + for name, func in cached_properties.items(): + # Add cached properties to names for slotting. + names += (name,) + # Clear out function from class to avoid clashing. + del cd[name] + additional_closure_functions_to_update.append(func) + annotation = inspect.signature(func).return_annotation + if annotation is not inspect.Parameter.empty: + class_annotations[name] = annotation + + original_getattr = cd.get("__getattr__") + if original_getattr is not None: + additional_closure_functions_to_update.append(original_getattr) + + cd["__getattr__"] = _make_cached_property_getattr( + cached_properties, original_getattr, self._cls + ) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. 
+ slot_names = [name for name in names if name not in base_names] + + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overridden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in existing_slots.items() + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_HASH_CACHE_FIELD) + + cd["__slots__"] = tuple(slot_names) + + cd["__qualname__"] = self._cls.__qualname__ + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . + # If a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in itertools.chain( + cls.__dict__.values(), additional_closure_functions_to_update + ): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. 
+ continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # noqa: PERF203 + # ValueError: Cell is empty + pass + else: + if match: + cell.cell_contents = cls + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + msg = "__str__ can only be generated if a __repr__ exists." + raise ValueError(msg) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return {name: getattr(self, name) for name in state_attr_names} + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _OBJ_SETATTR.__get__(self) + if isinstance(state, tuple): + # Backward compatibility with attrs instances pickled with + # attrs versions before v22.2.0 which stored tuples. + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + else: + for name in state_attr_names: + if name in state: + __bound_setattr(name, state[name]) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
+ if hash_caching_enabled: + __bound_setattr(_HASH_CACHE_FIELD, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._pre_init_has_args, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._pre_init_has_args, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a 
__setattr__ but there already is one! + msg = "Can't combine custom __setattr__ with on_setattr hooks." + raise ValueError(msg) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _OBJ_SETATTR(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + with contextlib.suppress(AttributeError): + method.__module__ = self._cls.__module__ + + with contextlib.suppress(AttributeError): + method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}" + + with contextlib.suppress(AttributeError): + method.__doc__ = ( + "Method generated by attrs for class " + f"{self._cls.__qualname__}." + ) + + return method + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. 
+ """ + if cmp is not None and any((eq is not None, order is not None)): + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. 
+ for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, + unsafe_hash=None, +): + r""" + A class decorator that adds :term:`dunder methods` according to the + specified attributes using `attr.ib` or the *these* argument. + + Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s`` will + *never* go away, though). + + Args: + repr_ns (str): + When using nested classes, there was no way in Python 2 to + automatically detect that. This argument allows to set a custom + name for a more meaningful ``repr`` output. This argument is + pointless in Python 3 and is therefore deprecated. + + .. caution:: + Refer to `attrs.define` for the rest of the parameters, but note that they + can have different defaults. + + Notably, leaving *on_setattr* as `None` will **not** add any hooks. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports `None` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. 
``__eq__`` and ``__ne__`` never tried to compare subclasses
+ if unsafe_hash is not None: + hash = unsafe_hash + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + msg = "Can't freeze a class with a custom __setattr__." + raise ValueError(msg) + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + nonlocal hash + if ( + hash is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + msg = "Invalid value for hash. Must be True, False, or None." + raise TypeError(msg) + + if hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." 
+ raise TypeError(msg) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." + raise TypeError(msg) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, init must be True." + raise TypeError(msg) + + if ( + PY_3_10_PLUS + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but `None` if used as `@attrs()`. + if maybe_cls is None: + return wrap + + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ is _frozen_setattrs + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. 
+ """ + return ( + f"" + ) + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + # If eq is custom generated, we need to include the functions in globs + globs = {} + + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + hash_def += ", *" + + hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):" + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + f" {type_hash},", + ] + ) + + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + globs[cmp_name] = a.eq_key + method_lines.append( + indent + f" {cmp_name}(self.{a.name})," + ) + else: + method_lines.append(indent + f" self.{a.name},") + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:") + if frozen: + append_hash_computation_lines( + f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + f"self.{_HASH_CACHE_FIELD} = ", tab * 2 + ) + method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}") + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + return _make_method("__hash__", script, unique_filename, globs) + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. 
+ """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. + globs = {} + if attrs: + lines.append(" return (") + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append( + f" {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})" + ) + else: + lines.append(f" self.{a.name} == other.{a.name}") + if a is not attrs[-1]: + lines[-1] = f"{lines[-1]} and" + lines.append(" )") + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. 
+ """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." 
+ name if i else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + f" return f'{cls_name_fragment}({repr_fragment})'", + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of *attrs* attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + Args: + cls (type): Class to introspect. + + Raises: + TypeError: If *cls* is not a class. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + Returns: + tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. + .. versionchanged:: 23.1.0 Add support for generic classes. + """ + generic_base = get_generic_base(cls) + + if generic_base is None and not isinstance(cls, type): + msg = "Passed object must be a class." 
+ raise TypeError(msg) + + attrs = getattr(cls, "__attrs_attrs__", None) + + if attrs is None: + if generic_base is not None: + attrs = getattr(generic_base, "__attrs_attrs__", None) + if attrs is not None: + # Even though this is global state, stick it on here to speed + # it up. We rely on `cls` being cached for this to be + # efficient. + cls.__attrs_attrs__ = attrs + return attrs + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) + + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of *attrs* attributes for a class, whose keys + are the attribute names. + + Args: + cls (type): Class to introspect. + + Raises: + TypeError: If *cls* is not a class. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + Returns: + dict[str, attrs.Attribute]: Dict of attribute name to definition + + .. versionadded:: 18.1.0 + """ + if not isinstance(cls, type): + msg = "Passed object must be a class." + raise TypeError(msg) + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) + return {a.name: a for a in attrs} + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + Args: + inst: Instance of a class with *attrs* attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. 
+ """ + cls = base_attr_map.get(a_name) + return cls and "__slots__" in cls.__dict__ + + +def _make_init( + cls, + attrs, + pre_init, + pre_init_has_args, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + pre_init_has_args, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + "__attrs_init__" if attrs_init else "__init__", + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. + globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__ + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str: + """ + Use the cached object.setattr to set *attr_name* to *value_var*. 
+ """ + return f"_setattr('{attr_name}', {value_var})" + + +def _setattr_with_converter( + attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter +) -> str: + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})" + + +def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str: + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return f"self.{attr_name} = {value}" + + +def _assign_with_converter( + attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter +) -> str: + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True, converter) + + return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}" + + +def _determine_setters( + frozen: bool, slots: bool, base_attr_map: dict[str, type] +): + """ + Determine the correct setter functions based on whether a class is frozen + and/or slotted. + """ + if frozen is True: + if slots is True: + return (), _setattr, _setattr_with_converter + + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. 
+ # Note _inst_dict will be used again below if cache_hash is True + + def fmt_setter( + attr_name: str, value_var: str, has_on_setattr: bool + ) -> str: + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return f"_inst_dict['{attr_name}'] = {value_var}" + + def fmt_setter_with_converter( + attr_name: str, + value_var: str, + has_on_setattr: bool, + converter: Converter, + ) -> str: + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr, converter + ) + + return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}" + + return ( + ("_inst_dict = self.__dict__",), + fmt_setter, + fmt_setter_with_converter, + ) + + # Not frozen -- we can just assign directly. + return (), _assign, _assign_with_converter + + +def _attrs_to_init_script( + attrs: list[Attribute], + is_frozen: bool, + is_slotted: bool, + call_pre_init: bool, + pre_init_has_args: bool, + call_post_init: bool, + does_cache_hash: bool, + base_attr_map: dict[str, type], + is_exc: bool, + needs_cached_setattr: bool, + has_cls_on_setattr: bool, + method_name: str, +) -> tuple[str, dict, dict]: + """ + Return a script of an initializer for *attrs*, a dict of globals, and + annotations for the initializer. + + The globals are required by the generated script. + """ + lines = ["self.__attrs_pre_init__()"] if call_pre_init else [] + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. Note _setattr will be used again below if + # does_cache_hash is True. + "_setattr = _cached_setattr_get(self)" + ) + + extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters( + is_frozen, is_slotted, base_attr_map + ) + lines.extend(extra_lines) + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. 
+ # Injecting this into __init__ globals lets us avoid lookups. + names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not + # explicitly provided + arg_name = a.alias + + has_factory = isinstance(a.default, Factory) + maybe_self = "self" if has_factory and a.default.takes_self else "" + + if a.converter and not isinstance(a.converter, Converter): + converter = Converter(a.converter) + else: + converter = a.converter + + if a.init is False: + if has_factory: + init_factory_name = _INIT_FACTORY_PAT % (a.name,) + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + elif converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + fmt_setter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = f"{arg_name}=attr_dict['{attr_name}'].default" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + 
lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = f"{arg_name}=NOTHING" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append(f"if {arg_name} is not NOTHING:") + + init_factory_name = _INIT_FACTORY_PAT % (a.name,) + if converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and converter is None: + annotations[arg_name] = a.type + elif converter is not None and converter._first_param_type: + # Use the type from the converter if present. + annotations[arg_name] = converter._first_param_type + + if attrs_to_validate: # we can skip this if there are no validators. 
+ names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append(f" {val_name}(self, {attr_name}, self.{a.name})") + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if call_post_init: + lines.append("self.__attrs_post_init__()") + + # Because this is set only after __attrs_post_init__ is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to field + # values during post-init combined with post-init accessing the hash code + # would result in silent bugs. + if does_cache_hash: + if is_frozen: + if is_slotted: + init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)" + else: + init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None" + else: + init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None" + lines.append(init_hash_cache) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join(f"self.{a.name}" for a in attrs if a.init) + + lines.append(f"BaseException.__init__(self, {vals})") + + args = ", ".join(args) + pre_init_args = args + if kw_only_args: + # leading comma & kw_only args + args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}" + pre_init_kw_only_args = ", ".join( + [ + f"{kw_arg_name}={kw_arg_name}" + # We need to remove the defaults from the kw_only_args. + for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args) + ] + ) + pre_init_args += ", " if pre_init_args else "" + pre_init_args += pre_init_kw_only_args + + if call_pre_init and pre_init_has_args: + # If pre init method has arguments, pass same arguments as `__init__`. + lines[0] = f"self.__attrs_pre_init__({pre_init_args})" + + # Python 3.7 doesn't allow backslashes in f strings. 
+ NL = "\n " + return ( + f"""def {method_name}(self, {args}): + {NL.join(lines) if lines else 'pass'} +""", + names_for_globals, + annotations, + ) + + +def _default_init_alias_for(name: str) -> str: + """ + The default __init__ parameter name for a field. + + This performs private-name adjustment via leading-unscore stripping, + and is the default value of Attribute.alias if not provided. + """ + + return name.lstrip("_") + + +class Attribute: + """ + *Read-only* representation of an attribute. + + .. warning:: + + You should never instantiate this class yourself. + + The class has *all* arguments of `attr.ib` (except for ``factory`` which is + only syntactic sugar for ``default=Factory(...)`` plus the following: + + - ``name`` (`str`): The name of the attribute. + - ``alias`` (`str`): The __init__ parameter name of the attribute, after + any explicit overrides and default private-attribute-name handling. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The + callables that are used for comparing and ordering objects by this + attribute, respectively. These are set by passing a callable to + `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also + :ref:`comparison customization `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + - The ``alias`` property exposes the __init__ parameter name of the field, + with any overrides and default private-attribute handling applied. + + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + .. 
versionadded:: 22.2.0 *alias* + + For the full version history of the fields, see `attr.ib`. + """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + "alias", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + alias=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _OBJ_SETATTR.__get__(self) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + types.MappingProxyType(dict(metadata)) # Shallow copy + if metadata + else _EMPTY_METADATA_SINGLETON + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + bound_setattr("alias", alias) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. 
deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + msg = "Type annotation and type argument cannot both be present" + raise ValueError(msg) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict, + ) + + # Don't use attrs.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attrs.evolve` but that function does not work + with {class}`Attribute`. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. 
+ """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _OBJ_SETATTR.__get__(self) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + ( + types.MappingProxyType(dict(value)) + if value + else _EMPTY_METADATA_SINGLETON + ), + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + alias=_default_init_alias_for(name), + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr: + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. 
+ """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + "alias", + ) + __attrs_attrs__ = ( + *tuple( + Attribute( + name=name, + alias=_default_init_alias_for(name), + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + "alias", + ) + ), + Attribute( + name="metadata", + alias="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + alias, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + self.alias = alias + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. 
+ + Raises: + DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory: + """ + Stores a factory callable. + + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. + + Args: + factory (typing.Callable): + A callable that takes either none or exactly one mandatory + positional argument depending on *takes_self*. + + takes_self (bool): + Pass the partially initialized instance that is being initialized + as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +class Converter: + """ + Stores a converter callable. + + Allows for the wrapped converter to take additional arguments. The + arguments are passed in the order they are documented. + + Args: + converter (Callable): A callable that converts the passed value. + + takes_self (bool): + Pass the partially initialized instance that is being initialized + as a positional argument. 
(default: `False`) + + takes_field (bool): + Pass the field definition (an :class:`Attribute`) into the + converter as a positional argument. (default: `False`) + + .. versionadded:: 24.1.0 + """ + + __slots__ = ( + "converter", + "takes_self", + "takes_field", + "_first_param_type", + "_global_name", + "__call__", + ) + + def __init__(self, converter, *, takes_self=False, takes_field=False): + self.converter = converter + self.takes_self = takes_self + self.takes_field = takes_field + + ex = _AnnotationExtractor(converter) + self._first_param_type = ex.get_first_param_type() + + if not (self.takes_self or self.takes_field): + self.__call__ = lambda value, _, __: self.converter(value) + elif self.takes_self and not self.takes_field: + self.__call__ = lambda value, instance, __: self.converter( + value, instance + ) + elif not self.takes_self and self.takes_field: + self.__call__ = lambda value, __, field: self.converter( + value, field + ) + else: + self.__call__ = lambda value, instance, field: self.converter( + value, instance, field + ) + + rt = ex.get_return_type() + if rt is not None: + self.__call__.__annotations__["return"] = rt + + @staticmethod + def _get_global_name(attr_name: str) -> str: + """ + Return the name that a converter for an attribute name *attr_name* + would have. + """ + return f"__attr_converter_{attr_name}" + + def _fmt_converter_call(self, attr_name: str, value_var: str) -> str: + """ + Return a string that calls the converter for an attribute name + *attr_name* and the value in variable named *value_var* according to + `self.takes_self` and `self.takes_field`. 
+ """ + if not (self.takes_self or self.takes_field): + return f"{self._get_global_name(attr_name)}({value_var})" + + if self.takes_self and self.takes_field: + return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])" + + if self.takes_self: + return f"{self._get_global_name(attr_name)}({value_var}, self)" + + return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])" + + def __getstate__(self): + """ + Return a dict containing only converter and takes_self -- the rest gets + computed when loading. + """ + return { + "converter": self.converter, + "takes_self": self.takes_self, + "takes_field": self.takes_field, + } + + def __setstate__(self, state): + """ + Load instance from state. + """ + self.__init__(**state) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in ("converter", "takes_self", "takes_field") +] + +Converter = _add_hash( + _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f +) + + +def make_class( + name, attrs, bases=(object,), class_body=None, **attributes_arguments +): + r""" + A quick way to create a new class called *name* with *attrs*. + + Args: + name (str): The name for the new class. + + attrs( list | dict): + A list of names or a dictionary of mappings of names to `attr.ib`\ + s / `attrs.field`\ s. + + The order is deduced from the order of the names or attributes + inside *attrs*. Otherwise the order of the definition of the + attributes is used. + + bases (tuple[type, ...]): Classes that the new class will subclass. + + class_body (dict): + An optional dictionary of class attributes for the new class. + + attributes_arguments: Passed unmodified to `attr.s`. + + Returns: + type: A new class with *attrs*. + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + .. 
versionchanged:: 23.2.0 *class_body* + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = {a: attrib() for a in attrs} + else: + msg = "attrs argument must be a dict or a list." + raise TypeError(msg) + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if class_body is not None: + body.update(class_body) + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + with contextlib.suppress(AttributeError, ValueError): + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + + # We do it here for proper warnings with meaningful stacklevel. + cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + cls = _attrs(these=cls_dict, **attributes_arguments)(type_) + # Only add type annotations now or "_attrs()" will complain: + cls.__annotations__ = { + k: v.type for k, v in cls_dict.items() if v.type is not None + } + return cls + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. + + +@attrs(slots=True, unsafe_hash=True) +class _AndValidator: + """ + Compose many validators to a single one. 
+ """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + Args: + validators (~collections.abc.Iterable[typing.Callable]): + Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + Type annotations will be inferred from the wrapped converters', if they + have any. + + converters (~collections.abc.Iterable[typing.Callable]): + Arbitrary number of converters. + + .. versionadded:: 20.1.0 + """ + + def pipe_converter(val, inst, field): + for c in converters: + val = c(val, inst, field) if isinstance(c, Converter) else c(val) + + return val + + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__.update({"val": A, "return": A}) + else: + # Get parameter type from first converter. + t = _AnnotationExtractor(converters[0]).get_first_param_type() + if t: + pipe_converter.__annotations__["val"] = t + + last = converters[-1] + if not PY_3_11_PLUS and isinstance(last, Converter): + last = last.__call__ + + # Get return type from last converter. 
+ rt = _AnnotationExtractor(last).get_return_type() + if rt: + pipe_converter.__annotations__["return"] = rt + + return Converter(pipe_converter, takes_self=True, takes_field=True) diff --git a/parrot/lib/python3.10/site-packages/attr/filters.py b/parrot/lib/python3.10/site-packages/attr/filters.py new file mode 100644 index 0000000000000000000000000000000000000000..689b1705a60ff110d6077bab996f8b4588e55b82 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/attr/filters.py @@ -0,0 +1,72 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful filters for `attrs.asdict` and `attrs.astuple`. +""" + +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isinstance(cls, type)), + frozenset(cls for cls in what if isinstance(cls, str)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Create a filter that only allows *what*. + + Args: + what (list[type, str, attrs.Attribute]): + What to include. Can be a type, a name, or an attribute. + + Returns: + Callable: + A callable that can be passed to `attrs.asdict`'s and + `attrs.astuple`'s *filter* argument. + + .. versionchanged:: 23.1.0 Accept strings with field names. + """ + cls, names, attrs = _split_what(what) + + def include_(attribute, value): + return ( + value.__class__ in cls + or attribute.name in names + or attribute in attrs + ) + + return include_ + + +def exclude(*what): + """ + Create a filter that does **not** allow *what*. + + Args: + what (list[type, str, attrs.Attribute]): + What to exclude. Can be a type, a name, or an attribute. + + Returns: + Callable: + A callable that can be passed to `attrs.asdict`'s and + `attrs.astuple`'s *filter* argument. + + .. 
versionchanged:: 23.3.0 Accept field name string as input argument + """ + cls, names, attrs = _split_what(what) + + def exclude_(attribute, value): + return not ( + value.__class__ in cls + or attribute.name in names + or attribute in attrs + ) + + return exclude_ diff --git a/parrot/lib/python3.10/site-packages/attr/setters.py b/parrot/lib/python3.10/site-packages/attr/setters.py new file mode 100644 index 0000000000000000000000000000000000000000..a9ce01698a5811478a8e68414ac6d153b7da85ed --- /dev/null +++ b/parrot/lib/python3.10/site-packages/attr/setters.py @@ -0,0 +1,79 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + # This can be removed once we drop 3.8 and use attrs.Converter instead. 
+ from ._make import Converter + + if not isinstance(c, Converter): + return c(new_value) + + return c(new_value, instance, attrib) + + return new_value + + +# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. +# Sphinx's autodata stopped working, so the docstring is inlined in the API +# docs. +NO_OP = object() diff --git a/parrot/lib/python3.10/site-packages/attr/setters.pyi b/parrot/lib/python3.10/site-packages/attr/setters.pyi new file mode 100644 index 0000000000000000000000000000000000000000..73abf36e7d5b0f5f56e7fddeee716824c1c60d58 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/attr/setters.pyi @@ -0,0 +1,20 @@ +from typing import Any, NewType, NoReturn, TypeVar + +from . import Attribute +from attrs import _OnSetAttrType + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... 
+ +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/INSTALLER b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/METADATA b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..34829fef47846b764c0f8032f16f66bcbc4807ab --- /dev/null +++ b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/METADATA @@ -0,0 +1,419 @@ +Metadata-Version: 2.2 +Name: deepspeed +Version: 0.14.0 +Summary: DeepSpeed library +Home-page: http://deepspeed.ai +Author: DeepSpeed Team +Author-email: deepspeed-info@microsoft.com +License: Apache Software License 2.0 +Project-URL: Documentation, https://deepspeed.readthedocs.io +Project-URL: Source, https://github.com/microsoft/DeepSpeed +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Description-Content-Type: text/markdown +Requires-Dist: hjson +Requires-Dist: ninja +Requires-Dist: numpy +Requires-Dist: packaging>=20.0 +Requires-Dist: psutil +Requires-Dist: py-cpuinfo +Requires-Dist: pydantic +Requires-Dist: pynvml +Requires-Dist: torch +Requires-Dist: tqdm +Provides-Extra: 1bit +Provides-Extra: 1bit-mpi +Requires-Dist: mpi4py; extra == "1bit-mpi" +Provides-Extra: readthedocs +Requires-Dist: autodoc_pydantic; extra == "readthedocs" +Requires-Dist: docutils<0.18; extra == "readthedocs" +Requires-Dist: hjson; extra == "readthedocs" +Requires-Dist: 
packaging; extra == "readthedocs" +Requires-Dist: psutil; extra == "readthedocs" +Requires-Dist: py-cpuinfo; extra == "readthedocs" +Requires-Dist: pydantic<2.0.0; extra == "readthedocs" +Requires-Dist: recommonmark; extra == "readthedocs" +Requires-Dist: sphinx_rtd_theme; extra == "readthedocs" +Requires-Dist: torch; extra == "readthedocs" +Requires-Dist: tqdm; extra == "readthedocs" +Provides-Extra: dev +Requires-Dist: accelerate; extra == "dev" +Requires-Dist: clang-format==16.0.2; extra == "dev" +Requires-Dist: deepspeed-kernels; sys_platform == "linux" and extra == "dev" +Requires-Dist: docutils<0.18; extra == "dev" +Requires-Dist: future; extra == "dev" +Requires-Dist: importlib-metadata>=4; extra == "dev" +Requires-Dist: mup; extra == "dev" +Requires-Dist: pre-commit>=2.20.0; extra == "dev" +Requires-Dist: pytest<=8.0.0; extra == "dev" +Requires-Dist: pytest-forked; extra == "dev" +Requires-Dist: pytest-randomly; extra == "dev" +Requires-Dist: pytest-xdist; extra == "dev" +Requires-Dist: recommonmark; extra == "dev" +Requires-Dist: sphinx; extra == "dev" +Requires-Dist: sphinx-rtd-theme; extra == "dev" +Requires-Dist: tensorboard; extra == "dev" +Requires-Dist: torchvision; extra == "dev" +Requires-Dist: transformers>=4.32.1; extra == "dev" +Requires-Dist: wandb; extra == "dev" +Provides-Extra: autotuning +Requires-Dist: tabulate; extra == "autotuning" +Provides-Extra: autotuning-ml +Requires-Dist: hjson; extra == "autotuning-ml" +Requires-Dist: tabulate; extra == "autotuning-ml" +Requires-Dist: xgboost; extra == "autotuning-ml" +Provides-Extra: sparse-attn +Requires-Dist: triton==1.0.0; extra == "sparse-attn" +Provides-Extra: sparse +Requires-Dist: neural-compressor==2.1.0; extra == "sparse" +Provides-Extra: inf +Requires-Dist: google; extra == "inf" +Requires-Dist: lm-eval==0.3.0; extra == "inf" +Requires-Dist: protobuf; extra == "inf" +Requires-Dist: qtorch; extra == "inf" +Requires-Dist: safetensors; extra == "inf" +Requires-Dist: sentencepiece; extra == 
"inf" +Requires-Dist: transformers>=4.32.1; extra == "inf" +Provides-Extra: sd +Requires-Dist: diffusers>=0.25.0; extra == "sd" +Requires-Dist: triton>=2.1.0; extra == "sd" +Provides-Extra: triton +Requires-Dist: triton==2.1.0; extra == "triton" +Provides-Extra: all +Requires-Dist: torch; extra == "all" +Requires-Dist: importlib-metadata>=4; extra == "all" +Requires-Dist: torchvision; extra == "all" +Requires-Dist: sphinx; extra == "all" +Requires-Dist: qtorch; extra == "all" +Requires-Dist: pytest-forked; extra == "all" +Requires-Dist: mup; extra == "all" +Requires-Dist: autodoc_pydantic; extra == "all" +Requires-Dist: pytest<=8.0.0; extra == "all" +Requires-Dist: transformers>=4.32.1; extra == "all" +Requires-Dist: triton==1.0.0; extra == "all" +Requires-Dist: pre-commit>=2.20.0; extra == "all" +Requires-Dist: triton>=2.1.0; extra == "all" +Requires-Dist: sphinx_rtd_theme; extra == "all" +Requires-Dist: lm-eval==0.3.0; extra == "all" +Requires-Dist: psutil; extra == "all" +Requires-Dist: wandb; extra == "all" +Requires-Dist: accelerate; extra == "all" +Requires-Dist: tqdm; extra == "all" +Requires-Dist: py-cpuinfo; extra == "all" +Requires-Dist: tensorboard; extra == "all" +Requires-Dist: packaging; extra == "all" +Requires-Dist: hjson; extra == "all" +Requires-Dist: protobuf; extra == "all" +Requires-Dist: pydantic<2.0.0; extra == "all" +Requires-Dist: tabulate; extra == "all" +Requires-Dist: sentencepiece; extra == "all" +Requires-Dist: recommonmark; extra == "all" +Requires-Dist: clang-format==16.0.2; extra == "all" +Requires-Dist: xgboost; extra == "all" +Requires-Dist: pytest-randomly; extra == "all" +Requires-Dist: google; extra == "all" +Requires-Dist: triton==2.1.0; extra == "all" +Requires-Dist: deepspeed-kernels; sys_platform == "linux" and extra == "all" +Requires-Dist: sphinx-rtd-theme; extra == "all" +Requires-Dist: safetensors; extra == "all" +Requires-Dist: mpi4py; extra == "all" +Requires-Dist: docutils<0.18; extra == "all" +Requires-Dist: future; 
extra == "all" +Requires-Dist: diffusers>=0.25.0; extra == "all" +Requires-Dist: pytest-xdist; extra == "all" +Requires-Dist: neural-compressor==2.1.0; extra == "all" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: license +Dynamic: project-url +Dynamic: provides-extra +Dynamic: requires-dist +Dynamic: summary + +[![License Apache 2.0](https://badgen.net/badge/license/apache2.0/blue)](https://github.com/Microsoft/DeepSpeed/blob/master/LICENSE) +[![PyPI version](https://badge.fury.io/py/deepspeed.svg)](https://pypi.org/project/deepspeed/) +[![Downloads](https://static.pepy.tech/badge/deepspeed)](https://pepy.tech/project/deepspeed) +[![Build](https://badgen.net/badge/build/check-status/blue)](#build-pipeline-status) +[![Twitter](https://img.shields.io/twitter/follow/MSFTDeepSpeed)](https://twitter.com/intent/follow?screen_name=MSFTDeepSpeed) +[![Japanese Twitter](https://img.shields.io/badge/%E6%97%A5%E6%9C%AC%E8%AA%9ETwitter-%40MSFTDeepSpeedJP-blue)](https://twitter.com/MSFTDeepSpeedJP) +[![Chinese Zhihu](https://img.shields.io/badge/%E7%9F%A5%E4%B9%8E-%E5%BE%AE%E8%BD%AFDeepSpeed-blue)](https://www.zhihu.com/people/deepspeed) + + +
+ + +
+ +## Latest News + DeepSpeed empowers ChatGPT-like model training with a single click, offering 15x speedup over SOTA RLHF systems with unprecedented cost reduction at all scales; [learn how](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-chat). + +* [2024/03] [DeepSpeed-FP6:The power of FP6-Centric Serving for Large Language Models](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fp6/03-05-2024) +* [2024/01] [DeepSpeed-FastGen: Introducting Mixtral, Phi-2, and Falcon support with major performance and feature enhancements.](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fastgen/2024-01-19) +* [2023/11] [Llama 2 Inference on 4th Gen Intel® Xeon® Scalable Processor with DeepSpeed](https://github.com/microsoft/DeepSpeed/tree/master/blogs/intel-inference) [[Intel version]](https://www.intel.com/content/www/us/en/developer/articles/technical/xllama-2-on-xeon-scalable-processor-with-deepspeed.html) +* [2023/11] [DeepSpeed ZeRO-Offload++: 6x Higher Training Throughput via Collaborative CPU/GPU Twin-Flow](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-offloadpp) +* [2023/11] [DeepSpeed-FastGen: High-throughput Text Generation for LLMs via MII and DeepSpeed-Inference](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fastgen) [[English](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fastgen)] [[中文](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fastgen/chinese/README.md)] [[日本語](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-fastgen/japanese/README.md)] +* [2023/10] [DeepSpeed-VisualChat: Improve Your Chat Experience with Multi-Round Multi-Image Inputs](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-visualchat/10-03-2023/README.md) [[English](https://github.com/microsoft/DeepSpeed/tree/master/blogs/deepspeed-visualchat/10-03-2023/README.md)] 
[[中文](https://github.com/microsoft/DeepSpeed/blob/master/blogs/deepspeed-visualchat/10-03-2023/README-Chinese.md)] [[日本語](https://github.com/microsoft/DeepSpeed/blob/master/blogs/deepspeed-visualchat/10-03-2023/README-Japanese.md)] +* [2023/09] Announcing the DeepSpeed4Science Initiative: Enabling large-scale scientific discovery through sophisticated AI system technologies [[DeepSpeed4Science website](https://deepspeed4science.ai/)] [[Tutorials](https://www.deepspeed.ai/deepspeed4science/)] [[White paper](https://arxiv.org/abs/2310.04610)] [[Blog](https://www.microsoft.com/en-us/research/blog/announcing-the-deepspeed4science-initiative-enabling-large-scale-scientific-discovery-through-sophisticated-ai-system-technologies/)] [[中文](https://github.com/microsoft/DeepSpeed/blob/master/blogs/deepspeed4science/chinese/README.md)] [[日本語](https://github.com/microsoft/DeepSpeed/blob/master/blogs/deepspeed4science/japanese/README.md)] + + + +
+ More news + +
+ +--- + +# Extreme Speed and Scale for DL Training and Inference + +***[DeepSpeed](https://www.deepspeed.ai/) enables world's most powerful language models like [MT-530B](https://www.microsoft.com/en-us/research/blog/using-deepspeed-and-megatron-to-train-megatron-turing-nlg-530b-the-worlds-largest-and-most-powerful-generative-language-model/) and [BLOOM](https://huggingface.co/blog/bloom-megatron-deepspeed)***. It is an easy-to-use deep learning optimization software suite that powers unprecedented scale and speed for both training and inference. With DeepSpeed you can: + +* Train/Inference dense or sparse models with billions or trillions of parameters +* Achieve excellent system throughput and efficiently scale to thousands of GPUs +* Train/Inference on resource constrained GPU systems +* Achieve unprecedented low latency and high throughput for inference +* Achieve extreme compression for an unparalleled inference latency and model size reduction with low costs + +--- + +# DeepSpeed's four innovation pillars + + + + +## DeepSpeed-Training + +DeepSpeed offers a confluence of system innovations, that has made large scale DL training effective, and efficient, greatly improved ease of use, and redefined the DL training landscape in terms of scale that is possible. These innovations such as ZeRO, 3D-Parallelism, DeepSpeed-MoE, ZeRO-Infinity, etc. fall under the training pillar. Learn more: [DeepSpeed-Training](https://www.deepspeed.ai/training/) + +## DeepSpeed-Inference + +DeepSpeed brings together innovations in parallelism technology such as tensor, pipeline, expert and ZeRO-parallelism, and combines them with high performance custom inference kernels, communication optimizations and heterogeneous memory technologies to enable inference at an unprecedented scale, while achieving unparalleled latency, throughput and cost reduction. This systematic composition of system technologies for inference falls under the inference pillar. 
Learn more: [DeepSpeed-Inference](https://www.deepspeed.ai/inference) + + +## DeepSpeed-Compression + +To further increase the inference efficiency, DeepSpeed offers easy-to-use and flexible-to-compose compression techniques for researchers and practitioners to compress their models while delivering faster speed, smaller model size, and significantly reduced compression cost. Moreover, SoTA innovations on compression like ZeroQuant and XTC are included under the compression pillar. Learn more: [DeepSpeed-Compression](https://www.deepspeed.ai/compression) + +## DeepSpeed4Science + +In line with Microsoft's mission to solve humanity's most pressing challenges, the DeepSpeed team at Microsoft is responding to this opportunity by launching a new initiative called *DeepSpeed4Science*, aiming to build unique capabilities through AI system technology innovations to help domain experts to unlock today's biggest science mysteries. Learn more: [DeepSpeed4Science website](https://deepspeed4science.ai/) and [tutorials](https://www.deepspeed.ai/deepspeed4science/) + +--- + +# DeepSpeed Software Suite + +## DeepSpeed Library + + The [DeepSpeed](https://github.com/microsoft/deepspeed) library (this repository) implements and packages the innovations and technologies in DeepSpeed Training, Inference and Compression Pillars into a single easy-to-use, open-sourced repository. It allows for easy composition of multitude of features within a single training, inference or compression pipeline. The DeepSpeed Library is heavily adopted by the DL community, and has been used to enable some of the most powerful models (see [DeepSpeed Adoption](#deepspeed-adoption)). 
+ +## Model Implementations for Inference (MII) + + [Model Implementations for Inference (MII)](https://github.com/microsoft/deepspeed-mii) is an open-sourced repository for making low-latency and high-throughput inference accessible to all data scientists by alleviating the need to apply complex system optimization techniques themselves. Out-of-box, MII offers support for thousands of widely used DL models, optimized using DeepSpeed-Inference, that can be deployed with a few lines of code, while achieving significant latency reduction compared to their vanilla open-sourced versions. + +## DeepSpeed on Azure + + DeepSpeed users are diverse and have access to different environments. We recommend to try DeepSpeed on Azure as it is the simplest and easiest method. The recommended method to try DeepSpeed on Azure is through AzureML [recipes](https://github.com/Azure/azureml-examples/tree/main/v1/python-sdk/workflows/train/deepspeed). The job submission and data preparation scripts have been made available [here](https://github.com/microsoft/Megatron-DeepSpeed/tree/main/examples_deepspeed/azureml). For more details on how to use DeepSpeed on Azure, please follow the [Azure tutorial](https://www.deepspeed.ai/tutorials/azure/). + +--- + +# DeepSpeed Adoption + +DeepSpeed is an important part of Microsoft’s new +[AI at Scale](https://www.microsoft.com/en-us/research/project/ai-at-scale/) +initiative to enable next-generation AI capabilities at scale, where you can find more +information [here](https://innovation.microsoft.com/en-us/exploring-ai-at-scale). 
+ +DeepSpeed has been used to train many different large-scale models, below is a list of several examples that we are aware of (if you'd like to include your model please submit a PR): + + * [Megatron-Turing NLG (530B)](https://www.microsoft.com/en-us/research/blog/using-deepspeed-and-megatron-to-train-megatron-turing-nlg-530b-the-worlds-largest-and-most-powerful-generative-language-model/) + * [Jurassic-1 (178B)](https://uploads-ssl.webflow.com/60fd4503684b466578c0d307/61138924626a6981ee09caf6_jurassic_tech_paper.pdf) + * [BLOOM (176B)](https://huggingface.co/blog/bloom-megatron-deepspeed) + * [GLM (130B)](https://github.com/THUDM/GLM-130B) + * [xTrimoPGLM (100B)](https://www.biorxiv.org/content/10.1101/2023.07.05.547496v2) + * [YaLM (100B)](https://github.com/yandex/YaLM-100B) + * [GPT-NeoX (20B)](https://github.com/EleutherAI/gpt-neox) + * [AlexaTM (20B)](https://www.amazon.science/blog/20b-parameter-alexa-model-sets-new-marks-in-few-shot-learning) + * [Turing NLG (17B)](https://www.microsoft.com/en-us/research/blog/turing-nlg-a-17-billion-parameter-language-model-by-microsoft/) + * [METRO-LM (5.4B)](https://arxiv.org/pdf/2204.06644.pdf) + +DeepSpeed has been integrated with several different popular open-source DL frameworks such as: + +| | Documentation | +| ---------------------------------------------------------------------------------------------- | -------------------------------------------- | + | [Transformers with DeepSpeed](https://huggingface.co/docs/transformers/main/main_classes/deepspeed) | +| | [Accelerate with DeepSpeed](https://huggingface.co/docs/accelerate/usage_guides/deepspeed) | +| | [Lightning with DeepSpeed](https://lightning.ai/docs/pytorch/stable/advanced/model_parallel.html#deepspeed) | +| | [MosaicML with DeepSpeed](https://docs.mosaicml.com/projects/composer/en/latest/trainer/using_the_trainer.html?highlight=deepspeed#deepspeed-integration) | +| | [Determined with 
DeepSpeed](https://docs.determined.ai/latest/training/apis-howto/deepspeed/overview.html) | +| | [MMEngine with DeepSpeed](https://mmengine.readthedocs.io/en/latest/common_usage/large_model_training.html#deepspeed) | + +--- + +# Build Pipeline Status + +| Description | Status | +| ----------- | ------ | +| NVIDIA | [![nv-torch110-p40](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch110-p40.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch110-p40.yml) [![nv-torch110-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch110-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch110-v100.yml) [![nv-torch-latest-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch-latest-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch-latest-v100.yml) [![nv-h100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-h100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-h100.yml) [![nv-inference](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-inference.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-inference.yml) [![nv-nightly](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-nightly.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-nightly.yml) | +| AMD | [![amd-mi200](https://github.com/microsoft/DeepSpeed/actions/workflows/amd-mi200.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/amd-mi200.yml) | +| CPU | [![nv-torch-latest-cpu](https://github.com/microsoft/DeepSpeed/actions/workflows/cpu-torch-latest.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/cpu-torch-latest.yml) | +| PyTorch Nightly | 
[![nv-torch-nightly-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch-nightly-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-torch-nightly-v100.yml) | +| Integrations | [![nv-transformers-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-transformers-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-transformers-v100.yml) [![nv-lightning-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-lightning-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-lightning-v100.yml) [![nv-accelerate-v100](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-accelerate-v100.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-accelerate-v100.yml) [![nv-mii](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-mii.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-mii.yml) [![nv-ds-chat](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-ds-chat.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-ds-chat.yml) [![nv-sd](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-sd.yml/badge.svg)](https://github.com/microsoft/DeepSpeed/actions/workflows/nv-sd.yml) | +| Misc | [![Formatting](https://github.com/microsoft/DeepSpeed/actions/workflows/formatting.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/formatting.yml) [![pages-build-deployment](https://github.com/microsoft/DeepSpeed/actions/workflows/pages/pages-build-deployment/badge.svg)](https://github.com/microsoft/DeepSpeed/actions/workflows/pages/pages-build-deployment) [![Documentation 
Status](https://readthedocs.org/projects/deepspeed/badge/?version=latest)](https://deepspeed.readthedocs.io/en/latest/?badge=latest)[![python](https://github.com/microsoft/DeepSpeed/actions/workflows/python.yml/badge.svg?branch=master)](https://github.com/microsoft/DeepSpeed/actions/workflows/python.yml) | + +# Installation + +The quickest way to get started with DeepSpeed is via pip, this will install +the latest release of DeepSpeed which is not tied to specific PyTorch or CUDA +versions. DeepSpeed includes several C++/CUDA extensions that we commonly refer +to as our 'ops'. By default, all of these extensions/ops will be built +just-in-time (JIT) using [torch's JIT C++ extension loader that relies on +ninja](https://pytorch.org/docs/stable/cpp_extension.html) to build and +dynamically link them at runtime. + +## Requirements +* [PyTorch](https://pytorch.org/) must be installed _before_ installing DeepSpeed. +* For full feature support we recommend a version of PyTorch that is >= 1.9 and ideally the latest PyTorch stable release. +* A CUDA or ROCm compiler such as [nvcc](https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/#introduction) or [hipcc](https://github.com/ROCm-Developer-Tools/HIPCC) used to compile C++/CUDA/HIP extensions. +* Specific GPUs we develop and test against are listed below, this doesn't mean your GPU will not work if it doesn't fall into this category it's just DeepSpeed is most well tested on the following: + * NVIDIA: Pascal, Volta, Ampere, and Hopper architectures + * AMD: MI100 and MI200 + +## PyPI +We regularly push releases to [PyPI](https://pypi.org/project/deepspeed/) and encourage users to install from there in most cases. + +```bash +pip install deepspeed +``` + +After installation, you can validate your install and see which extensions/ops +your machine is compatible with via the DeepSpeed environment report. 
+ +```bash +ds_report +``` + +If you would like to pre-install any of the DeepSpeed extensions/ops (instead +of JIT compiling) or install pre-compiled ops via PyPI please see our [advanced +installation instructions](https://www.deepspeed.ai/tutorials/advanced-install/). + +## Windows +Windows support is partially supported with DeepSpeed. On Windows you can build wheel with following steps, currently only inference mode is supported. +1. Install pytorch, such as pytorch 1.8 + cuda 11.1 +2. Install visual cpp build tools, such as VS2019 C++ x64/x86 build tools +3. Launch cmd console with Administrator privilege for creating required symlink folders +4. Run `python setup.py bdist_wheel` to build wheel in `dist` folder + +# Features + +Please checkout [DeepSpeed-Training](https://www.deepspeed.ai/training), [DeepSpeed-Inference](https://www.deepspeed.ai/inference) and [DeepSpeed-Compression](https://www.deepspeed.ai/compression) pages for full set of features offered along each of these three pillars. + +# Further Reading + +All DeepSpeed documentation, tutorials, and blogs can be found on our website: [deepspeed.ai](https://www.deepspeed.ai/) + + +| | Description | +| ---------------------------------------------------------------------------------------------- | -------------------------------------------- | +| [Getting Started](https://www.deepspeed.ai/getting-started/) | First steps with DeepSpeed | +| [DeepSpeed JSON Configuration](https://www.deepspeed.ai/docs/config-json/) | Configuring DeepSpeed | +| [API Documentation](https://deepspeed.readthedocs.io/en/latest/) | Generated DeepSpeed API documentation | +| [Tutorials](https://www.deepspeed.ai/tutorials/) | Tutorials | +| [Blogs](https://www.deepspeed.ai/posts/) | Blogs | + + +# Contributing +DeepSpeed welcomes your contributions! Please see our +[contributing](CONTRIBUTING.md) guide for more details on formatting, testing, +etc.
+Thanks so much to all of our amazing contributors! + + + + + +## Contributor License Agreement +This project welcomes contributions and suggestions. Most contributions require you to +agree to a Contributor License Agreement (CLA) declaring that you have the right to, and +actually do, grant us the rights to use your contribution. For details, visit +https://cla.opensource.microsoft.com. + +When you submit a pull request, a CLA bot will automatically determine whether you need +to provide a CLA and decorate the PR appropriately (e.g., status check, comment). Simply +follow the instructions provided by the bot. You will only need to do this once across +all repos using our CLA. + +## Code of Conduct +This project has adopted the [Microsoft Open Source Code of +Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the +[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact +[opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. + +# Publications +1. Samyam Rajbhandari, Jeff Rasley, Olatunji Ruwase, Yuxiong He. (2019) ZeRO: memory optimizations toward training trillion parameter models. [arXiv:1910.02054](https://arxiv.org/abs/1910.02054) and [In Proceedings of the International Conference for High Performance Computing, Networking, Storage and Analysis (SC '20)](https://dl.acm.org/doi/10.5555/3433701.3433727). +2. Jeff Rasley, Samyam Rajbhandari, Olatunji Ruwase, and Yuxiong He. (2020) DeepSpeed: System Optimizations Enable Training Deep Learning Models with Over 100 Billion Parameters. [In Proceedings of the 26th ACM SIGKDD International Conference on Knowledge Discovery & Data Mining (KDD '20, Tutorial)](https://dl.acm.org/doi/10.1145/3394486.3406703). +3. Minjia Zhang, Yuxiong He. (2020) Accelerating Training of Transformer-Based Language Models with Progressive Layer Dropping. 
[arXiv:2010.13369](https://arxiv.org/abs/2010.13369) and [NeurIPS 2020](https://proceedings.neurips.cc/paper/2020/hash/a1140a3d0df1c81e24ae954d935e8926-Abstract.html). +4. Jie Ren, Samyam Rajbhandari, Reza Yazdani Aminabadi, Olatunji Ruwase, Shuangyan Yang, Minjia Zhang, Dong Li, Yuxiong He. (2021) ZeRO-Offload: Democratizing Billion-Scale Model Training. [arXiv:2101.06840](https://arxiv.org/abs/2101.06840) and [USENIX ATC 2021](https://www.usenix.org/conference/atc21/presentation/ren-jie). [[paper]](https://arxiv.org/abs/2101.06840) [[slides]](https://www.usenix.org/system/files/atc21_slides_ren-jie.pdf) [[blog]](https://www.microsoft.com/en-us/research/blog/deepspeed-extreme-scale-model-training-for-everyone/) +5. Hanlin Tang, Shaoduo Gan, Ammar Ahmad Awan, Samyam Rajbhandari, Conglong Li, Xiangru Lian, Ji Liu, Ce Zhang, Yuxiong He. (2021) 1-bit Adam: Communication Efficient Large-Scale Training with Adam's Convergence Speed. [arXiv:2102.02888](https://arxiv.org/abs/2102.02888) and [ICML 2021](http://proceedings.mlr.press/v139/tang21a.html). +6. Samyam Rajbhandari, Olatunji Ruwase, Jeff Rasley, Shaden Smith, Yuxiong He. (2021) ZeRO-Infinity: Breaking the GPU Memory Wall for Extreme Scale Deep Learning. [arXiv:2104.07857](https://arxiv.org/abs/2104.07857) and [SC 2021](https://dl.acm.org/doi/abs/10.1145/3458817.3476205). [[paper]](https://arxiv.org/abs/2104.07857) [[slides]](docs/assets/files/SC21-ZeRO-Infinity.pdf) [[blog]](https://www.microsoft.com/en-us/research/blog/zero-infinity-and-deepspeed-unlocking-unprecedented-model-scale-for-deep-learning-training/) +7. Conglong Li, Ammar Ahmad Awan, Hanlin Tang, Samyam Rajbhandari, Yuxiong He. (2021) 1-bit LAMB: Communication Efficient Large-Scale Large-Batch Training with LAMB's Convergence Speed. [arXiv:2104.06069](https://arxiv.org/abs/2104.06069) and [HiPC 2022](https://hipc.org/advance-program/). +8. Conglong Li, Minjia Zhang, Yuxiong He. 
(2021) The Stability-Efficiency Dilemma: Investigating Sequence Length Warmup for Training GPT Models. [arXiv:2108.06084](https://arxiv.org/abs/2108.06084) and [NeurIPS 2022](https://openreview.net/forum?id=JpZ5du_Kdh). +9. Yucheng Lu, Conglong Li, Minjia Zhang, Christopher De Sa, Yuxiong He. (2022) Maximizing Communication Efficiency for Large-scale Training via 0/1 Adam. [arXiv:2202.06009](https://arxiv.org/abs/2202.06009). +10. Samyam Rajbhandari, Conglong Li, Zhewei Yao, Minjia Zhang, Reza Yazdani Aminabadi, Ammar Ahmad Awan, Jeff Rasley, Yuxiong He. (2022) DeepSpeed-MoE: Advancing Mixture-of-Experts Inference and Training to Power Next-Generation AI Scale [arXiv:2201.05596](https://arxiv.org/abs/2201.05596) and [ICML 2022](https://proceedings.mlr.press/v162/rajbhandari22a.html). [[pdf]](https://arxiv.org/abs/2201.05596) [[slides]](docs/assets/files/ICML-5mins.pdf) [[blog]](https://www.microsoft.com/en-us/research/blog/deepspeed-advancing-moe-inference-and-training-to-power-next-generation-ai-scale/) +11. Shaden Smith, Mostofa Patwary, Brandon Norick, Patrick LeGresley, Samyam Rajbhandari, Jared Casper, Zhun Liu, Shrimai Prabhumoye, George Zerveas, Vijay Korthikanti, Elton Zhang, Rewon Child, Reza Yazdani Aminabadi, Julie Bernauer, Xia Song, Mohammad Shoeybi, Yuxiong He, Michael Houston, Saurabh Tiwary, Bryan Catanzaro. (2022) Using DeepSpeed and Megatron to Train Megatron-Turing NLG 530B, A Large-Scale Generative Language Model [arXiv:2201.11990](https://arxiv.org/abs/2201.11990). +12. Xiaoxia Wu, Zhewei Yao, Minjia Zhang, Conglong Li, Yuxiong He. (2022) Extreme Compression for Pre-trained Transformers Made Simple and Efficient. [arXiv:2206.01859](https://arxiv.org/abs/2206.01859) and [NeurIPS 2022](https://openreview.net/forum?id=xNeAhc2CNAl). +13. Zhewei Yao, Reza Yazdani Aminabadi, Minjia Zhang, Xiaoxia Wu, Conglong Li, Yuxiong He. (2022) ZeroQuant: Efficient and Affordable Post-Training Quantization for Large-Scale Transformers. 
[arXiv:2206.01861](https://arxiv.org/abs/2206.01861) and [NeurIPS 2022](https://openreview.net/forum?id=f-fVCElZ-G1) [[slides]](docs/assets/files/zeroquant_series.pdf) [[blog]](https://www.microsoft.com/en-us/research/blog/deepspeed-compression-a-composable-library-for-extreme-compression-and-zero-cost-quantization/) +14. Reza Yazdani Aminabadi, Samyam Rajbhandari, Minjia Zhang, Ammar Ahmad Awan, Cheng Li, Du Li, Elton Zheng, Jeff Rasley, Shaden Smith, Olatunji Ruwase, Yuxiong He. (2022) DeepSpeed Inference: Enabling Efficient Inference of Transformer Models at Unprecedented Scale. [arXiv:2207.00032](https://arxiv.org/abs/2207.00032) and [SC 2022](https://dl.acm.org/doi/abs/10.5555/3571885.3571946). [[paper]](https://arxiv.org/abs/2207.00032) [[slides]](docs/assets/files/sc22-ds-inference.pdf) [[blog]](https://www.microsoft.com/en-us/research/blog/deepspeed-accelerating-large-scale-model-inference-and-training-via-system-optimizations-and-compression/) +15. Zhewei Yao, Xiaoxia Wu, Conglong Li, Connor Holmes, Minjia Zhang, Cheng Li, Yuxiong He. (2022) Random-LTD: Random and Layerwise Token Dropping Brings Efficient Training for Large-scale Transformers. [arXiv:2211.11586](https://arxiv.org/abs/2211.11586). +16. Conglong Li, Zhewei Yao, Xiaoxia Wu, Minjia Zhang, Yuxiong He. (2022) DeepSpeed Data Efficiency: Improving Deep Learning Model Quality and Training Efficiency via Efficient Data Sampling and Routing. [arXiv:2212.03597](https://arxiv.org/abs/2212.03597) [ENLSP2023 Workshop at NeurIPS2023](https://neurips2023-enlsp.github.io/) +17. Xiaoxia Wu, Cheng Li, Reza Yazdani Aminabadi, Zhewei Yao, Yuxiong He. (2023) Understanding INT4 Quantization for Transformer Models: Latency Speedup, Composability, and Failure Cases. [arXiv:2301.12017](https://arxiv.org/abs/2301.12017) and [ICML2023](https://icml.cc/Conferences/2023). +18. Syed Zawad, Cheng Li, Zhewei Yao, Elton Zheng, Yuxiong He, Feng Yan. (2023) DySR: Adaptive Super-Resolution via Algorithm and System Co-design. 
[ICLR:2023](https://openreview.net/forum?id=Pgtn4l6eKjv). +19. Sheng Shen, Zhewei Yao, Chunyuan Li, Trevor Darrell, Kurt Keutzer, Yuxiong He. (2023) Scaling Vision-Language Models with Sparse Mixture of Experts. [arXiv:2303.07226](https://arxiv.org/abs/2303.07226) and [Finding at EMNLP2023](https://2023.emnlp.org/). +20. Quentin Anthony, Ammar Ahmad Awan, Jeff Rasley, Yuxiong He, Aamir Shafi, Mustafa Abduljabbar, Hari Subramoni, Dhabaleswar Panda. (2023) MCR-DL: Mix-and-Match Communication Runtime for Deep Learning [arXiv:2303.08374](https://arxiv.org/abs/2303.08374) and will appear at IPDPS 2023. +21. Siddharth Singh, Olatunji Ruwase, Ammar Ahmad Awan, Samyam Rajbhandari, Yuxiong He, Abhinav Bhatele. (2023) A Hybrid Tensor-Expert-Data Parallelism Approach to Optimize Mixture-of-Experts Training [arXiv:2303.06318](https://arxiv.org/abs/2303.06318) and will appear at ICS 2023. +22. Guanhua Wang, Heyang Qin, Sam Ade Jacobs, Xiaoxia Wu, Connor Holmes, Zhewei Yao, Samyam Rajbhandari, Olatunji Ruwase, Feng Yan, Lei Yang, Yuxiong He. (2023) ZeRO++: Extremely Efficient Collective Communication for Giant Model Training [arXiv:2306.10209](https://arxiv.org/abs/2306.10209) and [ML for Sys Workshop at NeurIPS2023](http://mlforsystems.org/) [[blog]](https://www.microsoft.com/en-us/research/blog/deepspeed-zero-a-leap-in-speed-for-llm-and-chat-model-training-with-4x-less-communication/) +23. Zhewei Yao, Xiaoxia Wu, Cheng Li, Stephen Youn, Yuxiong He. (2023) ZeroQuant-V2: Exploring Post-training Quantization in LLMs from Comprehensive Study to Low Rank Compensation [arXiv:2303.08302](https://arxiv.org/abs/2303.08302) and [ENLSP2023 Workshop at NeurIPS2023](https://neurips2023-enlsp.github.io/) [[slides]](docs/assets/files/zeroquant_series.pdf) +24. Pareesa Ameneh Golnari, Zhewei Yao, Yuxiong He. (2023) Selective Guidance: Are All the Denoising Steps of Guided Diffusion Important? [arXiv:2305.09847](https://arxiv.org/abs/2305.09847) +25. 
Zhewei Yao, Reza Yazdani Aminabadi, Olatunji Ruwase, Samyam Rajbhandari, Xiaoxia Wu, Ammar Ahmad Awan, Jeff Rasley, Minjia Zhang, Conglong Li, Connor Holmes, Zhongzhu Zhou, Michael Wyatt, Molly Smith, Lev Kurilenko, Heyang Qin, Masahiro Tanaka, Shuai Che, Shuaiwen Leon Song, Yuxiong He. (2023) DeepSpeed-Chat: Easy, Fast and Affordable RLHF Training of ChatGPT-like Models at All Scales [arXiv:2308.01320](https://arxiv.org/abs/2308.01320). +26. Xiaoxia Wu, Zhewei Yao, Yuxiong He. (2023) ZeroQuant-FP: A Leap Forward in LLMs Post-Training W4A8 Quantization Using Floating-Point Formats [arXiv:2307.09782](https://arxiv.org/abs/2307.09782) and [ENLSP2023 Workshop at NeurIPS2023](https://neurips2023-enlsp.github.io/) [[slides]](docs/assets/files/zeroquant_series.pdf) +27. Zhewei Yao, Xiaoxia Wu, Conglong Li, Minjia Zhang, Heyang Qin, Olatunji Ruwase, Ammar Ahmad Awan, Samyam Rajbhandari, Yuxiong He. (2023) DeepSpeed-VisualChat: Multi-Round Multi-Image Interleave Chat via Multi-Modal Causal Attention [arXiv:2309.14327](https://arxiv.org/pdf/2309.14327.pdf) +28. Shuaiwen Leon Song, Bonnie Kruft, Minjia Zhang, Conglong Li, Shiyang Chen, Chengming Zhang, Masahiro Tanaka, Xiaoxia Wu, Jeff Rasley, Ammar Ahmad Awan, Connor Holmes, Martin Cai, Adam Ghanem, Zhongzhu Zhou, Yuxiong He, et al. (2023) DeepSpeed4Science Initiative: Enabling Large-Scale Scientific Discovery through Sophisticated AI System Technologies [arXiv:2310.04610](https://arxiv.org/abs/2310.04610) [[blog]](https://www.microsoft.com/en-us/research/blog/announcing-the-deepspeed4science-initiative-enabling-large-scale-scientific-discovery-through-sophisticated-ai-system-technologies/) +29. Zhewei Yao, Reza Yazdani Aminabadi, Stephen Youn, Xiaoxia Wu, Elton Zheng, Yuxiong He. (2023) ZeroQuant-HERO: Hardware-Enhanced Robust Optimized Post-Training Quantization Framework for W8A8 Transformers [arXiv:2310.17723](https://arxiv.org/abs/2310.17723) + +30. 
Xiaoxia Wu, Haojun Xia, Stephen Youn, Zhen Zheng, Shiyang Chen, Arash Bakhtiari, Michael Wyatt, Reza Yazdani Aminabadi, Yuxiong He, Olatunji Ruwase, Leon Song, Zhewei Yao (2023) ZeroQuant(4+2): Redefining LLMs Quantization with a New FP6-Centric Strategy for Diverse Generative Tasks [arXiv:2312.08583](https://arxiv.org/abs/2312.08583) + +31. Haojun Xia, Zhen Zheng, Xiaoxia Wu, Shiyang Chen, Zhewei Yao, Stephen Youn, Arash Bakhtiari, Michael Wyatt, Donglin Zhuang, Zhongzhu Zhou, Olatunji Ruwase, Yuxiong He, Shuaiwen Leon Song. (2024) FP6-LLM: Efficiently Serving Large Language Models Through FP6-Centric Algorithm-System Co-Design [arXiv:2401.14112](https://arxiv.org/abs/2401.14112) + + + +# Videos +1. DeepSpeed KDD 2020 Tutorial + 1. [Overview](https://www.youtube.com/watch?v=CaseqC45DNc&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=29) + 2. [ZeRO + large model training](https://www.youtube.com/watch?v=y4_bCiAsIAk&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=28) + 3. [17B T-NLG demo](https://www.youtube.com/watch?v=9V-ZbP92drg&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=27) + 4. [Fastest BERT training + RScan tuning](https://www.youtube.com/watch?v=o1K-ZG9F6u0&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=26) + 5. DeepSpeed hands on deep dive: [part 1](https://www.youtube.com/watch?v=_NOk-mBwDYg&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=92), [part 2](https://www.youtube.com/watch?v=sG6_c4VXLww&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=94), [part 3](https://www.youtube.com/watch?v=k9yPkBTayos&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=93) + 6. [FAQ](https://www.youtube.com/watch?v=nsHu6vEgPew&list=PLa85ZdUjfWS21mgibJ2vCvLziprjpKoW0&index=24) +2. Microsoft Research Webinar + * Registration is free and all videos are available on-demand. + * [ZeRO & Fastest BERT: Increasing the scale and speed of deep learning training in DeepSpeed](https://note.microsoft.com/MSR-Webinar-DeepSpeed-Registration-On-Demand.html). +3. 
[DeepSpeed on AzureML](https://youtu.be/yBVXR8G8Bg8) +4. [Large Model Training and Inference with DeepSpeed // Samyam Rajbhandari // LLMs in Prod Conference](https://www.youtube.com/watch?v=cntxC3g22oU) [[slides]](docs/assets/files/presentation-mlops.pdf) +5. Community Tutorials + * [DeepSpeed: All the tricks to scale to gigantic models (Mark Saroufim)](https://www.youtube.com/watch?v=pDGI668pNg0) + * [Turing-NLG, DeepSpeed and the ZeRO optimizer (Yannic Kilcher)](https://www.youtube.com/watch?v=tC01FRB0M7w) + * [Ultimate Guide To Scaling ML Models (The AI Epiphany)](https://www.youtube.com/watch?v=hc0u4avAkuM) diff --git a/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/RECORD b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..2b6cf7f11387f46d66198d6f53cbc10777a56df5 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/RECORD @@ -0,0 +1,1275 @@ +../../../bin/deepspeed,sha256=tx3_SO2Na520D9ZC6azgzNFz_1siDk_eA_-UOmP12Qc,114 +../../../bin/deepspeed.pt,sha256=tx3_SO2Na520D9ZC6azgzNFz_1siDk_eA_-UOmP12Qc,114 +../../../bin/ds,sha256=tx3_SO2Na520D9ZC6azgzNFz_1siDk_eA_-UOmP12Qc,114 +../../../bin/ds_bench,sha256=d7yRuS2eTQEOx9OqQGqtzMo3kOaF5uqsZa5hjtRprh8,765 +../../../bin/ds_elastic,sha256=6JwjDCeX84GsNgkUI3wCCHaLbXuQCFb7np5Iy4-QFIQ,1877 +../../../bin/ds_report,sha256=BuTF90oXPiIQr0iLLZKH47QF4cVC3x54l6eTeti_ojo,117 +../../../bin/ds_ssh,sha256=BT6cdZ47ceJiKPK40eVf9NCXF5RLjayEwWlyZ7viWbE,680 +../../../bin/dsr,sha256=BuTF90oXPiIQr0iLLZKH47QF4cVC3x54l6eTeti_ojo,117 +deepspeed-0.14.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +deepspeed-0.14.0.dist-info/METADATA,sha256=W-XDIc5pkdoouNuEybZygee72IO7u2esJofrsVR3b8M,41459 +deepspeed-0.14.0.dist-info/RECORD,, +deepspeed-0.14.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+deepspeed-0.14.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91 +deepspeed-0.14.0.dist-info/entry_points.txt,sha256=btWVtkuERrrqyxQojUVMJZOeaoNPz0pjiZKaJ_Q4MJ8,84 +deepspeed-0.14.0.dist-info/top_level.txt,sha256=VK6WJ48PiB2ke4oVIo2mLzqXYqGXdu4MPE2GP-vfvA8,10 +deepspeed/__init__.py,sha256=9vehgA1qG4Ky2-Kz0ucOp85AC7gCKvMnBLbb9B2d-_g,14510 +deepspeed/__pycache__/__init__.cpython-310.pyc,, +deepspeed/__pycache__/constants.cpython-310.pyc,, +deepspeed/__pycache__/env_report.cpython-310.pyc,, +deepspeed/__pycache__/git_version_info.cpython-310.pyc,, +deepspeed/__pycache__/git_version_info_installed.cpython-310.pyc,, +deepspeed/__pycache__/pydantic_v1.cpython-310.pyc,, +deepspeed/accelerator/__init__.py,sha256=1pNnvGX9h418ekjY_Xnpb363tLo-5Szw3icQf9hRvk0,248 +deepspeed/accelerator/__pycache__/__init__.cpython-310.pyc,, +deepspeed/accelerator/__pycache__/abstract_accelerator.cpython-310.pyc,, +deepspeed/accelerator/__pycache__/cpu_accelerator.cpython-310.pyc,, +deepspeed/accelerator/__pycache__/cuda_accelerator.cpython-310.pyc,, +deepspeed/accelerator/__pycache__/hpu_accelerator.cpython-310.pyc,, +deepspeed/accelerator/__pycache__/mps_accelerator.cpython-310.pyc,, +deepspeed/accelerator/__pycache__/npu_accelerator.cpython-310.pyc,, +deepspeed/accelerator/__pycache__/real_accelerator.cpython-310.pyc,, +deepspeed/accelerator/__pycache__/xpu_accelerator.cpython-310.pyc,, +deepspeed/accelerator/abstract_accelerator.py,sha256=7hSBfGvoDmndNEf06McVVN_hwStiDbfaxbR3qAyBAa4,5470 +deepspeed/accelerator/cpu_accelerator.py,sha256=CW5jsZgR0O_Kc5PbLLTThf-kVi4dN7NkaWI8T45tcP0,8562 +deepspeed/accelerator/cuda_accelerator.py,sha256=14QLR1hUI5V9XYyTgvAllQzrs-WcatidNNqiFQTZ1qs,12040 +deepspeed/accelerator/hpu_accelerator.py,sha256=uPwdaf0w78dtzRebYnmzhC_h6mnXcjdHFINf939H-ek,9110 +deepspeed/accelerator/mps_accelerator.py,sha256=CsRkIlyLkzpg7cML7HthKw8YGwPS1ZGI0B7HcRJyK4I,6126 
+deepspeed/accelerator/npu_accelerator.py,sha256=K1MHlQiUOhc-uItRE5YjcAGa6cdU2h2JaHTebL-P30U,8271 +deepspeed/accelerator/real_accelerator.py,sha256=Y4JwTOxGL2p58KgZBOfJmbgQ8UpPyzp3iXmgGYIyyz4,9964 +deepspeed/accelerator/xpu_accelerator.py,sha256=bLAhKEy-m32F4M2oAEkT87MFZWyXfEyq85JqbQoGQoU,9325 +deepspeed/autotuning/__init__.py,sha256=y0O9XDcX76E6dmhNFmRhy9TKfl0ywJrdkXmM2JuzErU,129 +deepspeed/autotuning/__pycache__/__init__.cpython-310.pyc,, +deepspeed/autotuning/__pycache__/autotuner.cpython-310.pyc,, +deepspeed/autotuning/__pycache__/config.cpython-310.pyc,, +deepspeed/autotuning/__pycache__/constants.cpython-310.pyc,, +deepspeed/autotuning/__pycache__/scheduler.cpython-310.pyc,, +deepspeed/autotuning/__pycache__/utils.cpython-310.pyc,, +deepspeed/autotuning/autotuner.py,sha256=zU6F-Aib-qUARtAAayLJVWZQ8nYCi_motuuHHs1wPM4,54320 +deepspeed/autotuning/config.py,sha256=pg5OeryVqrg32xak1NLEWaFiN1MYujdwlSgTuAz3Xl0,4633 +deepspeed/autotuning/config_templates/template_zero0.json,sha256=hR1baTMA5HzfTD1JlXjiTYjMtB9C8CFfxIBzqZeW2Sw,48 +deepspeed/autotuning/config_templates/template_zero1.json,sha256=AAIox-1dviiYg-Z7L-02bxItSnaggAlHkR4TekXXEIw,113 +deepspeed/autotuning/config_templates/template_zero2.json,sha256=FfrPKZHzMJuHn_TVJ7UZ-2s9yJ5IzBIKgT7nLYnIHh4,237 +deepspeed/autotuning/config_templates/template_zero3.json,sha256=WvsoMMp5MDgdVLq1RX1fhKsKbGSHQtpplZzGJikZvzs,485 +deepspeed/autotuning/constants.py,sha256=sr02-8B5ORdimK5VZ1N9FK1luWadmECFLSlMnM-uIxA,5943 +deepspeed/autotuning/scheduler.py,sha256=th_BWHDDZHWtOlNYDUa91p4El-W6wspRZxHxPJckTl4,15720 +deepspeed/autotuning/tuner/__init__.py,sha256=c9ImdL2iEc89lFltZ2PPe2EOUCuB3Olq3hRP1Q-yUMo,235 +deepspeed/autotuning/tuner/__pycache__/__init__.cpython-310.pyc,, +deepspeed/autotuning/tuner/__pycache__/base_tuner.cpython-310.pyc,, +deepspeed/autotuning/tuner/__pycache__/cost_model.cpython-310.pyc,, +deepspeed/autotuning/tuner/__pycache__/index_based_tuner.cpython-310.pyc,, 
+deepspeed/autotuning/tuner/__pycache__/model_based_tuner.cpython-310.pyc,, +deepspeed/autotuning/tuner/__pycache__/utils.cpython-310.pyc,, +deepspeed/autotuning/tuner/base_tuner.py,sha256=psA1I4-AEfqGrGLvYYUl3EjM8jG8AlhUXz-AnrFZ7G4,2754 +deepspeed/autotuning/tuner/cost_model.py,sha256=Uu9jD65cvdUK6aozm6Du3XJQZ_W-yOyUrSWFLdOT0do,1820 +deepspeed/autotuning/tuner/index_based_tuner.py,sha256=AEkTByT3XXCyYolAqBiMquV6XHN-ntGmYKMLZKPCRYE,1158 +deepspeed/autotuning/tuner/model_based_tuner.py,sha256=f_CrbgZNONRjdMuRhrJkQbhtih6GOg4LdkXqRDRDzaM,5614 +deepspeed/autotuning/tuner/utils.py,sha256=o5nD51Z6LBylJDRnDNIiOARlBHnOgINBVjBos6ypLEg,2329 +deepspeed/autotuning/utils.py,sha256=jz9cXQ6Qgbk_sfglyZ_A1qpm0NYBW-ET2-uxKQD7C5M,15053 +deepspeed/checkpoint/__init__.py,sha256=a69w1ZTe6jHftuguGEUHlP1ChLWFD-T-NV9pjepMqF0,561 +deepspeed/checkpoint/__pycache__/__init__.cpython-310.pyc,, +deepspeed/checkpoint/__pycache__/constants.cpython-310.pyc,, +deepspeed/checkpoint/__pycache__/deepspeed_checkpoint.cpython-310.pyc,, +deepspeed/checkpoint/__pycache__/ds_to_universal.cpython-310.pyc,, +deepspeed/checkpoint/__pycache__/reshape_3d_utils.cpython-310.pyc,, +deepspeed/checkpoint/__pycache__/reshape_meg_2d.cpython-310.pyc,, +deepspeed/checkpoint/__pycache__/reshape_utils.cpython-310.pyc,, +deepspeed/checkpoint/__pycache__/universal_checkpoint.cpython-310.pyc,, +deepspeed/checkpoint/__pycache__/utils.cpython-310.pyc,, +deepspeed/checkpoint/__pycache__/zero_checkpoint.cpython-310.pyc,, +deepspeed/checkpoint/constants.py,sha256=Q8HJTFapxw4wQLT-8UpHcxkdNBg9yKwqIPd0czPI0fk,3349 +deepspeed/checkpoint/deepspeed_checkpoint.py,sha256=HyD-8upqufOx9AAR0Re34r5UP_KUl8TWHJeQGMHkA1Q,12465 +deepspeed/checkpoint/ds_to_universal.py,sha256=prW8TdnBy15B9LZMaeJ1xqeuW4xEdoZXYGOeoKB7zwk,13547 +deepspeed/checkpoint/reshape_3d_utils.py,sha256=pmnOtdI3srRS46R9sXLh9kijXPxT3bFJ_c6lWdygbwc,4674 +deepspeed/checkpoint/reshape_meg_2d.py,sha256=o5dpF3CbrbtEt1twz0XsL3xbqCfLfEse7O2ke-WR9_s,7885 
+deepspeed/checkpoint/reshape_utils.py,sha256=e_vuB1xgKtbr523Dm5JotPembOPrte9k2dB2sYS18L4,2888 +deepspeed/checkpoint/universal_checkpoint.py,sha256=cdnl5u0WJkmIq8AXqZ4oAagLjQ6uRlm6WE9fx-bFTRs,5143 +deepspeed/checkpoint/utils.py,sha256=YJQIgtUGI4BAMqjdreC8V3cn3xKeWwfknt9rK9IFOGI,2534 +deepspeed/checkpoint/zero_checkpoint.py,sha256=Toykz0nkkZcuagLL7n9iAN4PbWxJ1LmhJTAn5jIj-jY,5316 +deepspeed/comm/__init__.py,sha256=N31DOMdAdlfSnQd64slK89dpW_y1Vc9oPVZabLArcYw,137 +deepspeed/comm/__pycache__/__init__.cpython-310.pyc,, +deepspeed/comm/__pycache__/backend.cpython-310.pyc,, +deepspeed/comm/__pycache__/ccl.cpython-310.pyc,, +deepspeed/comm/__pycache__/comm.cpython-310.pyc,, +deepspeed/comm/__pycache__/config.cpython-310.pyc,, +deepspeed/comm/__pycache__/constants.cpython-310.pyc,, +deepspeed/comm/__pycache__/reduce_op.cpython-310.pyc,, +deepspeed/comm/__pycache__/torch.cpython-310.pyc,, +deepspeed/comm/__pycache__/utils.cpython-310.pyc,, +deepspeed/comm/backend.py,sha256=TPFMx6ViU4n_NcfwWS56_Yp35PeCi-0Kh9za7UELeyA,1416 +deepspeed/comm/ccl.py,sha256=YJ_Ri38V_kueF4RiuI-HC42a6w0kr7RMkXoieqpIqn4,8538 +deepspeed/comm/comm.py,sha256=5dgWe0KOq6OnOsnlTcg2Qovm_v-RQjbXEKuv_-zzgxc,29299 +deepspeed/comm/config.py,sha256=TP6n9j6oHCDTe8LoLuYwN-zyA3KCefNBGDgD24cT36k,860 +deepspeed/comm/constants.py,sha256=Adtnb5LCEjRpylnwgsMAFRmWgPvxb8CEEDvn4-D39lw,1298 +deepspeed/comm/reduce_op.py,sha256=A_KxQtpfUrNun6Rn8SX6iQiuoVgfYM7bTtAYc-YTENE,259 +deepspeed/comm/torch.py,sha256=eow92yHYjRaETKAj8ekpz0ofW8oRorWNyaJlIY_Gi_U,17150 +deepspeed/comm/utils.py,sha256=I7hJ2Jq5HEPaJDD09eEI9xV5VJZeDdag0ovTeAc3Ex8,3842 +deepspeed/compression/__init__.py,sha256=s3fNEEJJdLa4D4rEwtraP5axoSqpxxmh6LAIvyzfUTM,243 +deepspeed/compression/__pycache__/__init__.cpython-310.pyc,, +deepspeed/compression/__pycache__/basic_layer.cpython-310.pyc,, +deepspeed/compression/__pycache__/compress.cpython-310.pyc,, +deepspeed/compression/__pycache__/config.cpython-310.pyc,, 
+deepspeed/compression/__pycache__/constants.cpython-310.pyc,, +deepspeed/compression/__pycache__/helper.cpython-310.pyc,, +deepspeed/compression/__pycache__/scheduler.cpython-310.pyc,, +deepspeed/compression/__pycache__/utils.cpython-310.pyc,, +deepspeed/compression/basic_layer.py,sha256=oZvNusdxqnlhM-RsaALf5Q-4m2kQLcKAUUMuIKBtXec,36047 +deepspeed/compression/compress.py,sha256=DueechYg3bDNAIn2XW0L9nv8qN81acTPwimvD4usE_8,11886 +deepspeed/compression/config.py,sha256=5TTZNLB_GDULXEf704-dbNJo0ILL47DNVWHWiMGio8Q,25067 +deepspeed/compression/constants.py,sha256=Fc2681jhRfm0ADzNjFKjhYIPSXAI29gqeQSGW0FxLoA,5569 +deepspeed/compression/helper.py,sha256=lEFpurhyU7-yKWgO-wp1RojHECkqR8qJ16KrT4Hl06U,14637 +deepspeed/compression/scheduler.py,sha256=QwnyBtRhkEkufjyhns-Riz_-GBdUePzuI8UEQJOu7Ho,8112 +deepspeed/compression/utils.py,sha256=vLWAEOQHa1w61zmMyFQ3dwxbm-5kM_S-zOI_eQhYx5E,7818 +deepspeed/constants.py,sha256=lyWNKJHfTPa0_rN6n4UoAiYERmKct9Sd43j4sH2qveQ,788 +deepspeed/elasticity/__init__.py,sha256=Cmi3gEN48VYdPSg4zLOJ1KGKP2G3Xeruz9TykgC13m4,383 +deepspeed/elasticity/__pycache__/__init__.cpython-310.pyc,, +deepspeed/elasticity/__pycache__/config.cpython-310.pyc,, +deepspeed/elasticity/__pycache__/constants.cpython-310.pyc,, +deepspeed/elasticity/__pycache__/elastic_agent.cpython-310.pyc,, +deepspeed/elasticity/__pycache__/elasticity.cpython-310.pyc,, +deepspeed/elasticity/__pycache__/utils.cpython-310.pyc,, +deepspeed/elasticity/config.py,sha256=OCmF8fP8_dL3LkKiEJDtOREnJB3NYQuCaK48qRs-eFA,4703 +deepspeed/elasticity/constants.py,sha256=5Ynz57XbIFxeFkhC3JwanR-Dw0-U7qrsqKIH9oQo-Hw,2454 +deepspeed/elasticity/elastic_agent.py,sha256=Wfv-T6M52YQLL5zeuIVZZGaowajoskAmwSxsRLgpv-Q,7886 +deepspeed/elasticity/elasticity.py,sha256=oGJXhEj-VVdT2iUa-ei_gHXAfhP5e_VgtUlw5N500mU,17361 +deepspeed/elasticity/utils.py,sha256=JQlpeRXFRDrFnb7DFMSOUyEOqHEqqXwk5PVx4-_jkEE,349 +deepspeed/env_report.py,sha256=-L4zfdCv5J3NwTU_bNJtsuPJd8I5ME7WDc9iGDIP4Sk,7355 
+deepspeed/git_version_info.py,sha256=_dsr4G-e6ncmNC05_APkJC1iJs3yl691vCGKR25fIpQ,771 +deepspeed/git_version_info_installed.py,sha256=OCZgXQxM3wLwjTrOF6jje2pmfF62KViK6mH6eeJSxE0,1125 +deepspeed/inference/__init__.py,sha256=ZV8FRXVTJ3IGIDnyXJgJjRlCbB2jH_KJ1hWoTS4l_y0,267 +deepspeed/inference/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/__pycache__/config.cpython-310.pyc,, +deepspeed/inference/__pycache__/engine.cpython-310.pyc,, +deepspeed/inference/config.py,sha256=3Utd-MyLRpqKoODaZRiHjbgCAq1Aqt-HbBq6yoWfP0g,10130 +deepspeed/inference/engine.py,sha256=3X9IMwa3LYnXLPp90VwBDMo2d0TNknJTMq-Azz0Zv6I,31472 +deepspeed/inference/quantization/__init__.py,sha256=4I9UpQ5vMRU5SYSF_dW9FJDEnBq4m_0SuwtVQ92lGaA,95 +deepspeed/inference/quantization/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/quantization/__pycache__/layers.cpython-310.pyc,, +deepspeed/inference/quantization/__pycache__/quantization.cpython-310.pyc,, +deepspeed/inference/quantization/__pycache__/quantization_context.cpython-310.pyc,, +deepspeed/inference/quantization/__pycache__/utils.cpython-310.pyc,, +deepspeed/inference/quantization/layers.py,sha256=s62KAh9pysdpMzV3KuaFeOdrezxLSsPcZi-Fmsj0w84,5729 +deepspeed/inference/quantization/quantization.py,sha256=-r1gbQC0AZe3umfsjla_2lp6opWrfI-0jfs6KtOaJ8o,4396 +deepspeed/inference/quantization/quantization_context.py,sha256=0IV04DTAk8PiOi07TGgABxFMkrKM4QFOqzRaT-RZO7w,514 +deepspeed/inference/quantization/utils.py,sha256=kNSafXIsvDF9qBwjLW9bwpDpaKxM0zmY7xmPu-0cVVY,11948 +deepspeed/inference/v2/__init__.py,sha256=9LrBSo2xdUl33MPzLedqmI13rnQOJJO-eT6fVsrt61k,283 +deepspeed/inference/v2/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/__pycache__/allocator.cpython-310.pyc,, +deepspeed/inference/v2/__pycache__/config_v2.cpython-310.pyc,, +deepspeed/inference/v2/__pycache__/engine_factory.cpython-310.pyc,, +deepspeed/inference/v2/__pycache__/engine_v2.cpython-310.pyc,, 
+deepspeed/inference/v2/__pycache__/inference_parameter.cpython-310.pyc,, +deepspeed/inference/v2/__pycache__/inference_utils.cpython-310.pyc,, +deepspeed/inference/v2/__pycache__/logging.cpython-310.pyc,, +deepspeed/inference/v2/__pycache__/scheduling_utils.cpython-310.pyc,, +deepspeed/inference/v2/allocator.py,sha256=A-yTKojaNr_O8COkDIFagTl4dd9aYJRIMtR7g8TIvJ4,1182 +deepspeed/inference/v2/checkpoint/__init__.py,sha256=6y_8HbNnkff-I3LYNyzZQ3DVDcy8CwjQAYykp_JiCw8,252 +deepspeed/inference/v2/checkpoint/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/checkpoint/__pycache__/base_engine.cpython-310.pyc,, +deepspeed/inference/v2/checkpoint/__pycache__/huggingface_engine.cpython-310.pyc,, +deepspeed/inference/v2/checkpoint/__pycache__/in_memory_engine.cpython-310.pyc,, +deepspeed/inference/v2/checkpoint/base_engine.py,sha256=9EmyYEwt1ZE861yuS-ayx9ykG2OzRi9p7_ua4hECMEs,1391 +deepspeed/inference/v2/checkpoint/huggingface_engine.py,sha256=Ewf_ZD5yvuZ6uFqeolGlegEnK-FXv-HwWpH3Djf9EXw,5560 +deepspeed/inference/v2/checkpoint/in_memory_engine.py,sha256=4tEQQPAWl4BG4FJFdcgnoa8jahMQUtrXtcssTwRUSeo,1487 +deepspeed/inference/v2/config_v2.py,sha256=JkEKCeYLrvpMEU84-Yz0b9G_EDCLJPbwwWER1FQywH4,1401 +deepspeed/inference/v2/engine_factory.py,sha256=3vlKzgGvgXWY2Bwcjs21IehVSd0fFMrgR_6l621bics,5719 +deepspeed/inference/v2/engine_v2.py,sha256=2IiT1buZLPYL66oC8OQdpANhRSFjN-xjvnnCJh9Wqts,10491 +deepspeed/inference/v2/inference_parameter.py,sha256=rPeDJ4h58L4weNeIiFDIUrpVgDLpkQtzihUl_L-rkbg,2788 +deepspeed/inference/v2/inference_utils.py,sha256=V5gryIBXY9kVVkwPWukMMzQKbn0vH2hyDWnDhQ4jWQk,2378 +deepspeed/inference/v2/kernels/__init__.py,sha256=KHPoSEGybfJdU54HoIOCZzeIovB3EsfzaZuHv_i36V4,132 +deepspeed/inference/v2/kernels/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/__pycache__/ds_kernel.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/__init__.py,sha256=72TCKbfeUY6LbxtwVeNKljxXvxFrY-zobANphLq7nVQ,276 
+deepspeed/inference/v2/kernels/core_ops/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/bias_activations/__init__.py,sha256=BpRq2zT5tKcsx4e7f8zcnDA5F6wEedmQXSI6idSEoTo,127 +deepspeed/inference/v2/kernels/core_ops/bias_activations/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/bias_activations/__pycache__/bias_activation.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/bias_activations/bias_activation.cpp,sha256=PdSuObELmrSu3lsYgfVEOO9tEEmexqO0an3_pP4nU3k,3084 +deepspeed/inference/v2/kernels/core_ops/bias_activations/bias_activation.h,sha256=q3vJjn6I7s_raM1Gy2TNnrY3nbskDDP9QyWwcDjpk6E,688 +deepspeed/inference/v2/kernels/core_ops/bias_activations/bias_activation.py,sha256=6uu2cWl65xBVWVHfZR9TfLiqrovUJOxOm50y2iZ9Y28,2437 +deepspeed/inference/v2/kernels/core_ops/bias_activations/bias_activation_cuda.cu,sha256=dq_Bwd-UGv8bK3wWA1llNGVUEXO_rAMZInl5PcfpO4Y,4820 +deepspeed/inference/v2/kernels/core_ops/blas_kernels/__init__.py,sha256=ND9hDC4WOLfDXnlqe7FySprn8RkCXvzrwQKwYG8ZFRQ,123 +deepspeed/inference/v2/kernels/core_ops/blas_kernels/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/blas_kernels/__pycache__/blas_linear.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/blas_kernels/blas.h,sha256=YtINP8OfWWiyZEx44zb1yylsXBHGBLdfxcb3xjBUNeI,5164 +deepspeed/inference/v2/kernels/core_ops/blas_kernels/blas_linear.py,sha256=AQ4eOYQ3ejZspCeBHm87Q32f_Lf_ooauZIoVobdD_G0,2023 +deepspeed/inference/v2/kernels/core_ops/blas_kernels/blas_utils.h,sha256=A9uQ1X55yAze5NwCT8vE12OH_TfqxoMz3G4vDRa_aVg,10157 +deepspeed/inference/v2/kernels/core_ops/core_ops.cpp,sha256=7CWy4rOvIT5KR59q1sHmDf4iCnbaxpyHD1jdclmajcs,1524 +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/__init__.py,sha256=tUarONsuAuUxyfogdH03MC3yebDySSrUEusyIEWNjrg,174 +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/__pycache__/__init__.cpython-310.pyc,, 
+deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/__pycache__/cuda_fp_ln_base.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/__pycache__/cuda_ln.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/__pycache__/cuda_post_ln.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/__pycache__/cuda_pre_ln.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/cuda_fp_ln_base.py,sha256=CmnqS4_iIbPWKbzNCKEv3wnng8UnRtyhSJiEz08cfPI,1302 +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/cuda_ln.py,sha256=bp4CvANVf8hQQzyKOIScnR1OU7kB41uBNK5ANIydCvs,854 +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/cuda_post_ln.py,sha256=cIJ9l9ey_fAdjuX00ZjooepUgvNEg_bc2ikSiR0MWK0,990 +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/cuda_pre_ln.py,sha256=meb0fP8vvu1lFGlMR8iEus7YKlGP6FMlad9FJ7ZPg8A,1225 +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/layer_norm.cpp,sha256=cWe1Z6kgQuPsrSTCOO1mWBtFY_CYqXHUt1HqhfiOyNs,4199 +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/layer_norm.h,sha256=SCuiMaChAEgmE0t2kwfbqaYXQ1ycU1QXDVDJSHxtQ6o,2157 +deepspeed/inference/v2/kernels/core_ops/cuda_layer_norm/layer_norm_cuda.cu,sha256=0lUOi_nh1YrOFUwAx4KQsIsIaNlSvrCBkfoOxi3wJtk,19765 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/__init__.py,sha256=0J6ih4FAYwgMMo0HSRWLTiBimI9hdeAFh9Ui5iwx2NU,123 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/cuda_linear/__pycache__/cuda_linear.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/cuda_linear/cuda_linear.py,sha256=_2OxKHsY9P5mKOW3sT1ZTIy72d3BGpEjt7_Kb2bNsD4,6827 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/cuda_linear_kernels.cpp,sha256=jjE1r9B3diVWwz5qglJOAIQaKF7oHasWY-UdjraDQEI,8897 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/cuda_linear_kernels.h,sha256=jwQKY8J49wJUBKa-jCCBgjouM-nc6rohO_8cA8lGZyQ,746 
+deepspeed/inference/v2/kernels/core_ops/cuda_linear/fp6_linear.cu,sha256=mi0zkwPalQ4AHSciMbcB_cd34O_lCEltCFIbzZg1Yt0,13916 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/fp6_linear.cuh,sha256=2Wi_iFXHEcKbupMYFShPToF2yKQna0WdVwUS_yXkY0U,1570 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/include/configs.h,sha256=XPl9PmMyD-OVFksGXN-eK67a-aoxJvjxNEBIXm3q6Fs,5044 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/include/kernel_matmul.cuh,sha256=W9s3rxCJqONbDZPE85-8mO-bp_I9vI5WnrAudOPUIDU,14429 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/include/kernel_reduction.cuh,sha256=L9fBtKm1jXCJYd5aZVyModdF5WcWXrKKRGfWqvq5YSE,1383 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/include/ptx_cp.async.cuh,sha256=AqMUDDWCtmFOjviGsADJCaRQ7Ty0XBoBHa1hCzWVTiI,1609 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/include/ptx_mma.cuh,sha256=UhiKUlg2yQfTLyLY1SoWlRmF6Aqwhv4fRF8xbHisCMQ,4580 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/include/utils_core.cuh,sha256=r-ruG0-m_NfOR_fejqYFPCVIjTkDmpeMbItZUdn-6l8,10207 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/include/utils_gmem.cuh,sha256=rSBPQ-b3vchAe2DOCk6Olpvj6ILAfw_5KgKLzY9rKMc,3163 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/include/utils_paralleldequant.cuh,sha256=teIrwngF8c5wy7AydVC9HN_jfWqqaePoxRPe5wHE4so,4028 +deepspeed/inference/v2/kernels/core_ops/cuda_linear/include/weight_prepacking.h,sha256=7N7RzilXZ_7rncF4A3Fqsvh6BmUVXZIBwl6UbTzct-8,8787 +deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/__init__.py,sha256=S3Q1i1iwyLp6kWWJjD2kTinhJQ_ND4gKHejhuxQ8-74,171 +deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/__pycache__/rms_norm.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/__pycache__/rms_norm_base.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/__pycache__/rms_pre_norm.cpython-310.pyc,, 
+deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/rms_norm.cpp,sha256=XqvW2ObN0dkRQIsqtWRGl_JYxkFjKAi-zjFMeEZVa3M,5873 +deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/rms_norm.h,sha256=FUf2hMir5TnFA6iU1Rx48DlnSYX__Yl0jkiNWARdREA,978 +deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/rms_norm.py,sha256=OkC86DHLQ03zshmFNUQO9r1mZ4mWD3gN0505IvzC1NE,770 +deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/rms_norm_base.py,sha256=cbZXvZSXRaD1MoTokRSRuw1anp_FXdGzg0moAxasQ1I,1311 +deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/rms_norm_cuda.cu,sha256=GrWivwyCSZ8gh1zaiJoKHh8sTcq8aot5m7XKx-Auw1A,10216 +deepspeed/inference/v2/kernels/core_ops/cuda_rms_norm/rms_pre_norm.py,sha256=qzME0FbrgV9IXbGCqIA0IP91rfFyRiTdiXwwQ39SufM,1204 +deepspeed/inference/v2/kernels/core_ops/gated_activations/__init__.py,sha256=2mIcBil301zqLsisa-B2C2EnTgQ2Zaz2QFRxPTD8eUE,128 +deepspeed/inference/v2/kernels/core_ops/gated_activations/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/gated_activations/__pycache__/gated_activation.cpython-310.pyc,, +deepspeed/inference/v2/kernels/core_ops/gated_activations/gated_activation.py,sha256=XQXwTY3PN0qWIdZMBWh8De2lEGtN8syw7ItH_uN319g,2755 +deepspeed/inference/v2/kernels/core_ops/gated_activations/gated_activation_kernels.cpp,sha256=aog8VhJGL0F4bwUOZTR232KTgRJRavFFk3Xj_55shLk,3521 +deepspeed/inference/v2/kernels/core_ops/gated_activations/gated_activation_kernels.h,sha256=DpAhhx6lO3TNA8eid6Tct9qfDrNFweHRypmn4serMW0,776 +deepspeed/inference/v2/kernels/core_ops/gated_activations/gated_activation_kernels_cuda.cu,sha256=Ju3GUCBv_-VOooBKV4POhfBiyu4HsQCKOaNLQPw-vPw,6068 +deepspeed/inference/v2/kernels/cutlass_ops/__init__.py,sha256=hpTBFM7draPD0dwxTHvGYgAdic-ysFMG6_XEr8x6V5c,146 +deepspeed/inference/v2/kernels/cutlass_ops/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/cutlass_ops/cutlass_ops.cpp,sha256=v6-PzCFZnmxFPXJm-pooeUISCq2V0S4E4_PAvg7oA6w,483 
+deepspeed/inference/v2/kernels/cutlass_ops/mixed_gemm/__init__.py,sha256=leIZbZ0995YwKwql6mU54HN_DJ1iBe67Cq_GdlXSMAY,122 +deepspeed/inference/v2/kernels/cutlass_ops/mixed_gemm/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/cutlass_ops/mixed_gemm/__pycache__/mixed_gemm.cpython-310.pyc,, +deepspeed/inference/v2/kernels/cutlass_ops/mixed_gemm/mixed_gemm.cu,sha256=fDh2z9IwBffz5AI1NltOo4tgC2a2c0bwZ-leLahnR2k,4131 +deepspeed/inference/v2/kernels/cutlass_ops/mixed_gemm/mixed_gemm.h,sha256=laUi-GFh6qbLo0Ywiz_h7TtNIxroXSFwPQKc6yBGhE8,410 +deepspeed/inference/v2/kernels/cutlass_ops/mixed_gemm/mixed_gemm.py,sha256=gZ7ljMs2f20rcL1624vIhiLg_yemxkrYYFDoX-K1M3Y,2674 +deepspeed/inference/v2/kernels/cutlass_ops/mixed_gemm/mixed_gemm_api.h,sha256=soDC6vGW7UCYrnfw1EftpmOz6BuOi27Ihyi8J2Cfw2Q,1570 +deepspeed/inference/v2/kernels/cutlass_ops/moe_gemm/__init__.py,sha256=RRc_fJh0o2_l9wVb7Czf2GqTxLzvS6Rj_zeYUGzRjjI,150 +deepspeed/inference/v2/kernels/cutlass_ops/moe_gemm/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/cutlass_ops/moe_gemm/__pycache__/mixed_moe_gemm.cpython-310.pyc,, +deepspeed/inference/v2/kernels/cutlass_ops/moe_gemm/__pycache__/moe_gemm.cpython-310.pyc,, +deepspeed/inference/v2/kernels/cutlass_ops/moe_gemm/mixed_moe_gemm.py,sha256=pAaowuHqgwtCqv3tyof9jpbPxZxgLdcj1vPM2NgDcx4,2996 +deepspeed/inference/v2/kernels/cutlass_ops/moe_gemm/moe_gemm.cu,sha256=FRZSfEKLH3lHjvdbTbuEr1qGAi1spT6lhTLuIJ_OGuI,8594 +deepspeed/inference/v2/kernels/cutlass_ops/moe_gemm/moe_gemm.h,sha256=hdqzk6YOF68wO1DBVWDdtX5z-MIGHdtekijLU8z1cHM,740 +deepspeed/inference/v2/kernels/cutlass_ops/moe_gemm/moe_gemm.py,sha256=zX6btY3JLxP-6mgcjlNLSHlmgHJ2z3ZMH6PV3DQPfeE,2572 +deepspeed/inference/v2/kernels/cutlass_ops/moe_gemm/moe_gemm_api.h,sha256=RWmGN2LrcQukrsLoYaqykymCQJ1onmubdqXrHHgx2Is,1839 +deepspeed/inference/v2/kernels/cutlass_ops/shared_resources/weight_variant.h,sha256=1Mr74mw0SDFt0BsMgWiLvYXXaMWmhIgCUz1MZlWXYMs,278 
+deepspeed/inference/v2/kernels/ds_kernel.py,sha256=R0n_uBCLCGAGlJa7lvep4cNF0DBxkD6TDKDFNAWpBpU,961 +deepspeed/inference/v2/kernels/includes/activation_type.h,sha256=WkFfkL5mgLyYAZimLUouThCkyA2aiF9LQlh1q206L9o,264 +deepspeed/inference/v2/kernels/includes/conversion_utils.h,sha256=augq8Zf-t46MaxWh0ryFb46_Guxud0k4kIK-h93rI8Y,12379 +deepspeed/inference/v2/kernels/includes/ds_kernel_utils.h,sha256=SNb_fMME8CA5YI1ws4kCW99jR4S2ccQqIgTA7ymmttQ,1280 +deepspeed/inference/v2/kernels/includes/memory_access_utils.h,sha256=wqpAqnfmSD5Fx15qwsnY63VCL6tYyJaChCqLxakM10w,33966 +deepspeed/inference/v2/kernels/includes/reduction_utils.h,sha256=189h93WF77CblgWonfW5eQqoTeJnb-Rt2Ov2fZ6mO-E,22352 +deepspeed/inference/v2/kernels/ragged_ops/__init__.py,sha256=8C6TomumccM_-p-iiyPyjZnDLPjTdQS8E4ydGQGNLu4,324 +deepspeed/inference/v2/kernels/ragged_ops/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/atom_builder/__init__.py,sha256=djM0DTjGkr4vvNuhRIWV-2OI7lroepRMBiHIJtrcqTA,124 +deepspeed/inference/v2/kernels/ragged_ops/atom_builder/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/atom_builder/__pycache__/atom_builder.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/atom_builder/atom_builder.cpp,sha256=AjIgvgoS7PUYGt4MW7oFu8QEcQvm3J16tyerW-dx8cI,2051 +deepspeed/inference/v2/kernels/ragged_ops/atom_builder/atom_builder.h,sha256=i232uHXwHjh2EaAF3709YNcTQ29uVDI2GQOs6GnYAsQ,732 +deepspeed/inference/v2/kernels/ragged_ops/atom_builder/atom_builder.py,sha256=QgVnamgI1qG8xjzu-9i5sdBccOuhgbTeAtufUtLNdog,1744 +deepspeed/inference/v2/kernels/ragged_ops/blocked_flash/__init__.py,sha256=696rc8-r9nWCnfSG8UZ_fagI28Nerwq-nZCDvIIXPRs,125 +deepspeed/inference/v2/kernels/ragged_ops/blocked_flash/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/blocked_flash/__pycache__/blocked_flash.cpython-310.pyc,, 
+deepspeed/inference/v2/kernels/ragged_ops/blocked_flash/attention_atom.h,sha256=-ITylnAQx_20rXiWdPPldb149cGG-fNQUqOg8iHVugU,1062 +deepspeed/inference/v2/kernels/ragged_ops/blocked_flash/blocked_flash.cpp,sha256=qaNovmlpqw7Cw_Y1ArGmH-7fB1nPQd0idvmC4nDR9pw,3912 +deepspeed/inference/v2/kernels/ragged_ops/blocked_flash/blocked_flash.h,sha256=io7L6krERzOgiBIKNk3zwEppMDSvWtOqCY9nH35dB_U,459 +deepspeed/inference/v2/kernels/ragged_ops/blocked_flash/blocked_flash.py,sha256=-aU8A7FbwrRqJPSHZEOef6Y3_PRndGKfCFT1c-3S1DQ,3830 +deepspeed/inference/v2/kernels/ragged_ops/blocked_flash/flash.h,sha256=VJKtKOCufGTGt8ZGuPXfsA-dzsi87cqNTYX7pbqHKH4,1931 +deepspeed/inference/v2/kernels/ragged_ops/embed/__init__.py,sha256=y4HCIAaLuHexvTBcNppgiXqo76vHb_pAahL75kQBKY0,137 +deepspeed/inference/v2/kernels/ragged_ops/embed/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/embed/__pycache__/embed.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/embed/embed.cpp,sha256=iI-rjkQlCGnujjVzY1h8e6ccO24G7eVxwPLY5t_HKxY,4840 +deepspeed/inference/v2/kernels/ragged_ops/embed/embed.cuh,sha256=C5ZS1W9lnEDPBmWsuoJpRP-pgCYwLxqTK_OI-DQrT04,956 +deepspeed/inference/v2/kernels/ragged_ops/embed/embed.h,sha256=XbqCgqlx16bRtRLljUstzjssRJIGx7AixbbaofhILl8,700 +deepspeed/inference/v2/kernels/ragged_ops/embed/embed.py,sha256=6lxDG2ymTyx2yGZ_9jDFyvcchbtD1iwKr2icLgHtmxE,3011 +deepspeed/inference/v2/kernels/ragged_ops/embed/embed_cuda.cu,sha256=5JfvhMBcKAPXQbF4WvE3Ljmolom8ArjvkNtprx1SaKo,6055 +deepspeed/inference/v2/kernels/ragged_ops/includes/top_k_utils.h,sha256=JWLYr_XaxsSKzYDrql2mq3czcvpMWdfm5bYYWxQ16VU,512 +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/__init__.py,sha256=ou-mEvP9FMj89HvxswZbtTvS_T8Gcy3zf16jPcb_Urw,208 +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/__pycache__/blocked_kv_rotary.cpython-310.pyc,, 
+deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/__pycache__/blocked_trained_kv_rotary.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/__pycache__/linear_blocked_kv_copy.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/blocked_kv_rotary.cpp,sha256=qUdVHb-2UMnlH8Q6zZJLUW69T75AMwfqEZjco2EXbdY,8110 +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/blocked_kv_rotary.cuh,sha256=X9w1u75CiGNEOV0uaXAmjX8usEiTXFTZG7EK3ulvZDc,1676 +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/blocked_kv_rotary.h,sha256=5RfNKDify3eHIV6GjfqNozT6_lc2shcwNidgrQ6AJD0,2406 +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/blocked_kv_rotary.py,sha256=_Gm-lifSoXPbJdE4E_KmZJSMatrHDoSKiTSIta6kT1o,3132 +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/blocked_kv_rotary_cuda.cu,sha256=mWwmw_7o16vUoscUCYepgsJp-ZRX6ahg8W-7Zc09fek,18951 +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/blocked_trained_kv_rotary.py,sha256=paNdriwxr6CQxQ-e7JAvNxg7EloNEMCO3nOlWI7HtWE,3216 +deepspeed/inference/v2/kernels/ragged_ops/linear_blocked_kv_rotary/linear_blocked_kv_copy.py,sha256=vGAWocPPIm3UI22GYZEXCWb1MVuyZsp2BeJjKQCxl5g,2953 +deepspeed/inference/v2/kernels/ragged_ops/logits_gather/__init__.py,sha256=-6rVFXUnZ3U4MBA1VNuKQfhcdjlWgc_xnufXpnFfGyg,125 +deepspeed/inference/v2/kernels/ragged_ops/logits_gather/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/logits_gather/__pycache__/logits_gather.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/logits_gather/logits_gather.cpp,sha256=wItdeUW_dSJzfePXbRKjYeggcfGG_l8yO7OAPXF4IeU,1800 +deepspeed/inference/v2/kernels/ragged_ops/logits_gather/logits_gather.cuh,sha256=OQlrhwuHJIwGfhbozWlM4NKhYoKBtAL-3nc0vqATdZY,624 +deepspeed/inference/v2/kernels/ragged_ops/logits_gather/logits_gather.h,sha256=bRw1hkiDAAqkP9FkNwsL1QZ0UeGSx1_pYHOY4yoTF5k,571 
+deepspeed/inference/v2/kernels/ragged_ops/logits_gather/logits_gather.py,sha256=kGPLmxKKqOh2x35QWtCpeXLXxoG15kxmE_Z_-cd7Yv0,2121 +deepspeed/inference/v2/kernels/ragged_ops/logits_gather/logits_gather_cuda.cu,sha256=Dd2HJJHpPlDkt2C91667Fn1GVuc4jonsIW8s1vPKhHk,3312 +deepspeed/inference/v2/kernels/ragged_ops/moe_gather/__init__.py,sha256=K3BZ4_CDJGkNmvFCeYvIEBY2DRMWqq5CzCL7HAStX8Q,122 +deepspeed/inference/v2/kernels/ragged_ops/moe_gather/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/moe_gather/__pycache__/moe_gather.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/moe_gather/moe_gather.cpp,sha256=zWULPg_6e15LzTk5q2DJJcsgwT4zoLhTNIc0ClJf73w,2392 +deepspeed/inference/v2/kernels/ragged_ops/moe_gather/moe_gather.cuh,sha256=orang8iBcPNQ0BjFTeiBWNX09UjDq82Sh8tyye-5xyU,706 +deepspeed/inference/v2/kernels/ragged_ops/moe_gather/moe_gather.h,sha256=MfJuJ2rg_ba6KvIm1LNbTTHMBH8MYQHgd7JeYJSG9Q8,561 +deepspeed/inference/v2/kernels/ragged_ops/moe_gather/moe_gather.py,sha256=31zC0Sb3caA12JdhT7qJoawCUTkKB5PjZpFupQI6FvE,2265 +deepspeed/inference/v2/kernels/ragged_ops/moe_gather/moe_gather_cuda.cu,sha256=oy_bzQhyMfmjwiXag0u4YKjyKHaLXMDWYHlJQnQZNWM,6679 +deepspeed/inference/v2/kernels/ragged_ops/moe_scatter/__init__.py,sha256=e3cxpdOKqIBYpxvNmMnj6Ms883dqdsl4B_fXdqTnlnM,123 +deepspeed/inference/v2/kernels/ragged_ops/moe_scatter/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/moe_scatter/__pycache__/moe_scatter.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/moe_scatter/moe_scatter.cpp,sha256=ywodf_RYumTot90MnTLb4o1h7HDWgiFuVTuhHwxNooE,2858 +deepspeed/inference/v2/kernels/ragged_ops/moe_scatter/moe_scatter.cuh,sha256=M6t_ydSLYKsekCym5m07AGDu8-U_qVG0hLuVH_nrO-A,772 +deepspeed/inference/v2/kernels/ragged_ops/moe_scatter/moe_scatter.h,sha256=pBdy99sow7TKoB3eV3gdPbCB4_pGNx48K-Eo8C8XjHs,701 
+deepspeed/inference/v2/kernels/ragged_ops/moe_scatter/moe_scatter.py,sha256=7c_jFxhoJINWl57LIhFZwvlr08W8Nbdk7ULSFmC8Yqc,2504 +deepspeed/inference/v2/kernels/ragged_ops/moe_scatter/moe_scatter_cuda.cu,sha256=US5_PjVcIqYcqR2DDKXUIJAMfdXKI0ElfbNCUmLscbw,8070 +deepspeed/inference/v2/kernels/ragged_ops/ragged_helpers/ragged_dtypes.h,sha256=OVEob4D9LFPkyxA4PscOrGV5BzBalfiMrTrkcifCvL0,940 +deepspeed/inference/v2/kernels/ragged_ops/ragged_helpers/ragged_kernel_helpers.cpp,sha256=QaWqjTz4kXXR804xXlgNBTOxtOtLSP3NcGDNHuFqqQI,1149 +deepspeed/inference/v2/kernels/ragged_ops/ragged_helpers/ragged_kernel_helpers.h,sha256=_7Xb4l1mfYbSz7i2HAZ8TqQSLJy6TBBC51OMaarJXfM,563 +deepspeed/inference/v2/kernels/ragged_ops/ragged_ops.cpp,sha256=Bnku0tlvBSyPIwOtx7-kvIGg3OPGmsgjMO72rLABEkQ,1472 +deepspeed/inference/v2/kernels/ragged_ops/top_k_gating/__init__.py,sha256=N0qSPddtzqj_UkISW7rxEVXpdk8a5JItbXBjNAY_FBE,139 +deepspeed/inference/v2/kernels/ragged_ops/top_k_gating/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/top_k_gating/__pycache__/top_k_gating.cpython-310.pyc,, +deepspeed/inference/v2/kernels/ragged_ops/top_k_gating/top_k_gating.cpp,sha256=EBxWZ_Nm2ygSbFtnzI7kGMsl3nZxECXXefAVjMDDVx4,2435 +deepspeed/inference/v2/kernels/ragged_ops/top_k_gating/top_k_gating.cuh,sha256=1U1wNpJPmIhCiSwRi7xeRSR_zCcGnuXbFwBOz_gQplw,751 +deepspeed/inference/v2/kernels/ragged_ops/top_k_gating/top_k_gating.h,sha256=FdryRq5nESxX9HgzKIhnUgwfaoOzLir5O361gf-4g_M,556 +deepspeed/inference/v2/kernels/ragged_ops/top_k_gating/top_k_gating.py,sha256=-xgNLLmp6ea_QIMkGtuYDgt-fLfvq6wPVKgMWQpIHfA,2578 +deepspeed/inference/v2/kernels/ragged_ops/top_k_gating/top_k_gating_cuda.cu,sha256=LCuHhCZhWYGT69EW4mq3sPYr9JafW9Ef1YIScTNCQC0,5130 +deepspeed/inference/v2/logging.py,sha256=H4Dp4vKNi42HZaNgXOAOG-Vki2oC3U3APVQwq3ay8kE,780 +deepspeed/inference/v2/model_implementations/__init__.py,sha256=q9r2n-KHFDe0VOsQxW-UDMtYgyMJJ6qL5Q4vP7f5jP4,530 
+deepspeed/inference/v2/model_implementations/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/__pycache__/flat_model_helpers.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/__pycache__/inference_model_base.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/__pycache__/inference_policy_base.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/__pycache__/inference_transformer_base.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/__pycache__/layer_container_base.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/__pycache__/parameter_base.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/common_parameters/__init__.py,sha256=_mp6NDvxO-3TM9LiiOTrMBRIweeiRjU6s6L-GpTvX9U,359 +deepspeed/inference/v2/model_implementations/common_parameters/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/common_parameters/__pycache__/attn_output_parameters.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/common_parameters/__pycache__/embedding_parameters.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/common_parameters/__pycache__/invfreq_parameters.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/common_parameters/__pycache__/mlp_parameters.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/common_parameters/__pycache__/moe_parameters.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/common_parameters/__pycache__/norm_parameters.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/common_parameters/__pycache__/qkv_parameters.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/common_parameters/__pycache__/unembed_parameters.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/common_parameters/attn_output_parameters.py,sha256=VwOj8hRbIQQnWfoHVq-vbRK_QLt9f-uimARsTHfot4w,763 
+deepspeed/inference/v2/model_implementations/common_parameters/embedding_parameters.py,sha256=k7CMnQ06UchamvQXsdeiTfxAkjpor3qZjTU0cD20hZU,605 +deepspeed/inference/v2/model_implementations/common_parameters/invfreq_parameters.py,sha256=Srz4b0mFK73OBzsw3EK3Aydj21Z1gZjLuyC8qfry6QI,401 +deepspeed/inference/v2/model_implementations/common_parameters/mlp_parameters.py,sha256=LWMXApceofCiyZP0CvNtiEZhK6A23OTriuTynOXAjwI,2753 +deepspeed/inference/v2/model_implementations/common_parameters/moe_parameters.py,sha256=hTKZ8zIERsvQ6i6zQRWe47qUcJmMAGOHfJinwpY3r04,2548 +deepspeed/inference/v2/model_implementations/common_parameters/norm_parameters.py,sha256=xTaLT5H81-Et8y9FivEaY8N9VQ0XDsvtlV4v1wnFUG4,454 +deepspeed/inference/v2/model_implementations/common_parameters/qkv_parameters.py,sha256=pxBm6EBMNLvbJTPAY9ABjdQ68-oxbY_nWb5CFGDdvdA,4086 +deepspeed/inference/v2/model_implementations/common_parameters/unembed_parameters.py,sha256=-gdVA8UMMvFsd-lSAvCbQp5mQ25j1sT5f7KC2mSVY_A,651 +deepspeed/inference/v2/model_implementations/falcon/__init__.py,sha256=IVA-W-wyht49JDQt7MTTsBMN4dHAAQxWPEULVVwkoVY,129 +deepspeed/inference/v2/model_implementations/falcon/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/falcon/__pycache__/container.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/falcon/__pycache__/model.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/falcon/__pycache__/policy.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/falcon/container.py,sha256=TNdqevl_yh4g06l80i3RJC88dExK2bfd_22NSA-yz0M,4479 +deepspeed/inference/v2/model_implementations/falcon/model.py,sha256=ELmk5fmti6cUmUqYRcsnykS746An6eEibPf0G3IQsgs,7768 +deepspeed/inference/v2/model_implementations/falcon/policy.py,sha256=oFyIvfzFEistRJCzXS-wPtFfUXDPqt9ANhPa7ywrKSQ,1342 +deepspeed/inference/v2/model_implementations/flat_model_helpers.py,sha256=w5xeAQXsJ39EDHTiOAih4zNatO_1LlwB6CdGIyTymDo,9741 
+deepspeed/inference/v2/model_implementations/inference_model_base.py,sha256=3YVtQafVIrZMI-kZadRKmQklafI5fjc38GTdTZeM3jQ,10083 +deepspeed/inference/v2/model_implementations/inference_policy_base.py,sha256=enve3SYK8HTf0HQB8FieQQt22zEaJRQD0MWI-1VtddU,9461 +deepspeed/inference/v2/model_implementations/inference_transformer_base.py,sha256=SgXRyFYWj31DJMbDpgo06nEyAJLD-w3qKJF1xbAm9Jo,23592 +deepspeed/inference/v2/model_implementations/layer_container_base.py,sha256=dWkfTe5NwRV2icpr2n9z-WsVTBlmitOxteFkgaAKPhg,15443 +deepspeed/inference/v2/model_implementations/llama_v2/__init__.py,sha256=J8aLmbt1KDVL_LOtSqaP-xACSxlgIDjeaIA9R-Grsgs,129 +deepspeed/inference/v2/model_implementations/llama_v2/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/llama_v2/__pycache__/container.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/llama_v2/__pycache__/model.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/llama_v2/__pycache__/policy.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/llama_v2/container.py,sha256=blAioiSrdB9kH0zHFV0ZO1kWChv6-geaZW1ivEaSQx8,2664 +deepspeed/inference/v2/model_implementations/llama_v2/model.py,sha256=YmauqGYxtx2o_apo28aYvEhxi3SmYYvLm1IoEE7tGgs,7515 +deepspeed/inference/v2/model_implementations/llama_v2/policy.py,sha256=A1z_j19nj8yOURNNC29DeSyMatAjzuPUtcnIz09zZII,1148 +deepspeed/inference/v2/model_implementations/mistral/__init__.py,sha256=rzXGZBulBvo8ZNt7E5z1mSi8OlN1edNDTFlLCMCqo3M,130 +deepspeed/inference/v2/model_implementations/mistral/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/mistral/__pycache__/container.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/mistral/__pycache__/model.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/mistral/__pycache__/policy.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/mistral/container.py,sha256=0THLABZ6g4W4H_Kf2INGnyVrXHroT2uveKr-F2deP88,2784 
+deepspeed/inference/v2/model_implementations/mistral/model.py,sha256=QhDoAoi2aMguJf4GJXqNyivqRKAjJLUwf5EsVgYHl38,7350 +deepspeed/inference/v2/model_implementations/mistral/policy.py,sha256=ceBkx-EOWH6UmcosmmhcE7anO5cDMaPGhLInK-x3PBA,1056 +deepspeed/inference/v2/model_implementations/mixtral/__init__.py,sha256=ywKUfNNkXZuDaMa9NT05H3MkYJQgiCWl3eM8kZMkhJE,130 +deepspeed/inference/v2/model_implementations/mixtral/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/mixtral/__pycache__/container.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/mixtral/__pycache__/model.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/mixtral/__pycache__/policy.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/mixtral/container.py,sha256=YMdy5flaitAQHdy71VBCi7n0vQBfWcPRyXTfZeABchA,1658 +deepspeed/inference/v2/model_implementations/mixtral/model.py,sha256=smAqT1nkiFmn2ipNNvJqd2BiIHHprI5O7iWKOFsKM4M,9096 +deepspeed/inference/v2/model_implementations/mixtral/policy.py,sha256=u1Am_kWG7UvEt8A5V4lTn_BjUZ4eSlnTB32R5agIYy4,1057 +deepspeed/inference/v2/model_implementations/opt/__init__.py,sha256=WeJqbtuxqlXaqif4F_4KpZnGBCwXwKEn0OwUZOcxmpo,126 +deepspeed/inference/v2/model_implementations/opt/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/opt/__pycache__/container.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/opt/__pycache__/model.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/opt/__pycache__/policy.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/opt/container.py,sha256=1jIU2Icv-lSEi_XMhaS4jGUVWFfbP-B2t2xf52eQVwY,3415 +deepspeed/inference/v2/model_implementations/opt/model.py,sha256=3FFAoq-iBo_F5ep0guXKXk6d5BnNOx0QZFiv2VILScs,7262 +deepspeed/inference/v2/model_implementations/opt/policy.py,sha256=9cqEPs_pM9gdzsBegbXJ7GHEgHfmD6n_AaRpjMH1yGE,1066 
+deepspeed/inference/v2/model_implementations/parameter_base.py,sha256=Tb0SRvm6cB6G730Gb6ew051XA8Awngs6gaYaroKn_f0,8993 +deepspeed/inference/v2/model_implementations/phi/__init__.py,sha256=jKL7LDi4qrR3X3S6rcFpkls2K0wXVV6Qb3mIkiOtRKk,126 +deepspeed/inference/v2/model_implementations/phi/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/phi/__pycache__/containers.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/phi/__pycache__/model.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/phi/__pycache__/policy.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/phi/containers.py,sha256=L1S3qe2iHCv73EtWzb-IPn_dg8rxzwUn27HPXMatmCg,3197 +deepspeed/inference/v2/model_implementations/phi/model.py,sha256=b12uXjTpbHy_0EklvawD67KxwkrV-ooYSSdjNxGbz58,7251 +deepspeed/inference/v2/model_implementations/phi/policy.py,sha256=FIcCwm7nwUQOoFT1yGV_OTFd2xldUWmL_TqmpjtjUgU,1175 +deepspeed/inference/v2/model_implementations/qwen/__init__.py,sha256=1TgzvNCP0loWbohqWRMH9NXCKKjXVVagBtatz_NCHc8,127 +deepspeed/inference/v2/model_implementations/qwen/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/qwen/__pycache__/container.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/qwen/__pycache__/model.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/qwen/__pycache__/policy.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/qwen/container.py,sha256=4fnYCNY92cDw116OyEVhmp_fuFtfeeyq4ZG4T5NHVi0,2362 +deepspeed/inference/v2/model_implementations/qwen/model.py,sha256=cCoVWHR2t_9LrLSwYE2LTCJa6nxXj9-l-Qr5q819Iq0,8112 +deepspeed/inference/v2/model_implementations/qwen/policy.py,sha256=0nP7dboH8FSnh6xvImRKK0gVWkw2iOTImwCJw73V-OA,1066 +deepspeed/inference/v2/model_implementations/qwen_v2/__init__.py,sha256=s0OcwPN9W24-8vYSQ8kztzky9jEwOvER9aMR60Xejuk,128 +deepspeed/inference/v2/model_implementations/qwen_v2/__pycache__/__init__.cpython-310.pyc,, 
+deepspeed/inference/v2/model_implementations/qwen_v2/__pycache__/container.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/qwen_v2/__pycache__/model.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/qwen_v2/__pycache__/policy.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/qwen_v2/container.py,sha256=HpDpzng1cMtevhB-XykEKvmPPXENaDRV6BiLVYM7wjo,2817 +deepspeed/inference/v2/model_implementations/qwen_v2/model.py,sha256=EENwSZm-DZRQrv64UCLoEHop_U8xZ9yVejtboPzri_8,8029 +deepspeed/inference/v2/model_implementations/qwen_v2/policy.py,sha256=U9rMVvegwkKMqw6SoFXGZXobYTrjfpfySUryrpF0TDs,1140 +deepspeed/inference/v2/model_implementations/sharding/__init__.py,sha256=M0NsMjUqO0Rj0PAPm2lyRepH5lc3uFQswNM5Gr3QAcI,247 +deepspeed/inference/v2/model_implementations/sharding/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/sharding/__pycache__/attn.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/sharding/__pycache__/attn_out.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/sharding/__pycache__/embedding.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/sharding/__pycache__/mlp.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/sharding/__pycache__/qkv.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/sharding/__pycache__/types.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/sharding/__pycache__/unembed.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/sharding/__pycache__/utils.cpython-310.pyc,, +deepspeed/inference/v2/model_implementations/sharding/attn.py,sha256=QAapGaB4Bp1k9xI53-QhhyKFoaeIPandzI1qnPT2rxE,2376 +deepspeed/inference/v2/model_implementations/sharding/attn_out.py,sha256=MWf6O0P55Am0x947474ZqGcOEuZn2nGptKcNPCxfS0c,4909 +deepspeed/inference/v2/model_implementations/sharding/embedding.py,sha256=mJgZR289D6_ODuTZm4pVsedyJEHaXH2a4ncI8_7YdG0,1270 
+deepspeed/inference/v2/model_implementations/sharding/mlp.py,sha256=UIpBwlbU9xDNv1iDbIfcxqPgcAf_WvO42NxtMGyZ1QQ,2895 +deepspeed/inference/v2/model_implementations/sharding/qkv.py,sha256=pQfXbEPeqEZ9eCJwcdSvJsid5jeBJ2jptWhqVaPJSPM,7609 +deepspeed/inference/v2/model_implementations/sharding/types.py,sha256=YqDHnyDLGhy2sf37n-a0eHIIXqcQ8OjHKHQ-rcKAl8E,575 +deepspeed/inference/v2/model_implementations/sharding/unembed.py,sha256=1AmukReQ6SAfFlq2YdiQwJrrvPLVx7fhel7KKCGXXjQ,1625 +deepspeed/inference/v2/model_implementations/sharding/utils.py,sha256=wKlbKIwSyblinL1xupsHDXodws2zfWhxt-flmac1grc,5066 +deepspeed/inference/v2/modules/__init__.py,sha256=oZhP5-VwkgPMtHVFO_9MefXPnSEfcHdQeIXaRg3VOeY,193 +deepspeed/inference/v2/modules/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/__pycache__/ds_module.cpython-310.pyc,, +deepspeed/inference/v2/modules/__pycache__/heuristics.cpython-310.pyc,, +deepspeed/inference/v2/modules/__pycache__/module_registry.cpython-310.pyc,, +deepspeed/inference/v2/modules/configs/__init__.py,sha256=IJ5d0k84imLnM7jh0UcZ9k3_ITPU7YI_6ZoGRpEJcqs,449 +deepspeed/inference/v2/modules/configs/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/configs/__pycache__/attention_configs.cpython-310.pyc,, +deepspeed/inference/v2/modules/configs/__pycache__/embedding_config.cpython-310.pyc,, +deepspeed/inference/v2/modules/configs/__pycache__/linear_config.cpython-310.pyc,, +deepspeed/inference/v2/modules/configs/__pycache__/moe_config.cpython-310.pyc,, +deepspeed/inference/v2/modules/configs/__pycache__/norm_config.cpython-310.pyc,, +deepspeed/inference/v2/modules/configs/__pycache__/unembed_config.cpython-310.pyc,, +deepspeed/inference/v2/modules/configs/attention_configs.py,sha256=IWbmcmCc3CdL1E4FEUhrN9xU9wfbEaswUD8a0cM_D3Y,2834 +deepspeed/inference/v2/modules/configs/embedding_config.py,sha256=PuRrpOWLwtqjZRg9jumeMHuqzzsTcrbCjRxKL2L6xww,1812 
+deepspeed/inference/v2/modules/configs/linear_config.py,sha256=fz9m8RIL6NVjbkq06-0WSP4-7IxIdSVQi1nRY4WQdTc,1301 +deepspeed/inference/v2/modules/configs/moe_config.py,sha256=EqX1OLq_QGt4IrSGhAJUXfVvy0MqjZk2E3toeUkb7kA,1289 +deepspeed/inference/v2/modules/configs/norm_config.py,sha256=iTKxfBxWcHs2JiiuxNZU356DGJR-a0OgDW3p5oshedk,839 +deepspeed/inference/v2/modules/configs/unembed_config.py,sha256=2E9XPh8VSRJckwng_DwbQksLMXtWZMFiXUjo5GWhzqI,822 +deepspeed/inference/v2/modules/ds_module.py,sha256=1h8X2TvYvDsUyEUjllPlziT7XOqwRxP02GBagzJw7FU,1856 +deepspeed/inference/v2/modules/heuristics.py,sha256=zkm-TNIKnOLnNBDTs2m3foFX_jrcHZVvlC_UX9lEggE,7356 +deepspeed/inference/v2/modules/implementations/__init__.py,sha256=0rEv4jixmtnw_sIe_Y5UKaIEcG7qwALUSzx1GNORTdk,289 +deepspeed/inference/v2/modules/implementations/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/attention/__init__.py,sha256=tNj2_fpavmUWaI5VAkUF0I7k0OxmIlZRNZZl7JIqrR4,157 +deepspeed/inference/v2/modules/implementations/attention/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/attention/__pycache__/dense_blocked_attention.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/attention/dense_blocked_attention.py,sha256=JgS7E2TzKmWylwRYWiYR-F7ldLdg8qhGr8Q9bXSPoCA,7735 +deepspeed/inference/v2/modules/implementations/embedding/__init__.py,sha256=CUiHnEi77BVJmH3aoJOanq5mTJIvEcIo3cHcbkddDHs,144 +deepspeed/inference/v2/modules/implementations/embedding/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/embedding/__pycache__/ragged_embedding.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/embedding/ragged_embedding.py,sha256=zddQlHcpjLe2_dKQUTp_5VfJwTP6L-BRk8HUHD2vPRo,2740 +deepspeed/inference/v2/modules/implementations/linear/__init__.py,sha256=FXT-bP-U4YlbMbBDrgwkpMPTHps3NiztMczMulIgmPU,203 
+deepspeed/inference/v2/modules/implementations/linear/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/linear/__pycache__/blas_fp_linear.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/linear/__pycache__/quantized_linear.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/linear/blas_fp_linear.py,sha256=nHZHiLKVcppWyn0hNrQq4sHrS9UYKlHunrwL92DUVkQ,3629 +deepspeed/inference/v2/modules/implementations/linear/quantized_linear.py,sha256=5CQEpEBNq-Tzyt5dH0OK3RzZJUWjAxBieqzuC2OFJgw,7772 +deepspeed/inference/v2/modules/implementations/moe/__init__.py,sha256=XP3Ddz7eBtLvpIGum9u3gzUhl61w0W_jNfopcMI3QRE,143 +deepspeed/inference/v2/modules/implementations/moe/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/moe/__pycache__/cutlass_multi_gemm.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/moe/cutlass_multi_gemm.py,sha256=s1sdp-4N7dpwqg9m1FhkRvAQmaC0DxKs6h-5rzXyjyY,11082 +deepspeed/inference/v2/modules/implementations/post_norm/__init__.py,sha256=4hE4KqEcl74cOWm1kvexksZwHMK8rk-OQMzJYkdmgQo,141 +deepspeed/inference/v2/modules/implementations/post_norm/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/post_norm/__pycache__/cuda_post_ln.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/post_norm/cuda_post_ln.py,sha256=DNHGpJ2wNC8P6vyyk8M8hDpuXpA7k-ni78W3olRKO18,2086 +deepspeed/inference/v2/modules/implementations/pre_norm/__init__.py,sha256=_yJ5_1apmG5KriWHdy9klvL5H0vFzmLyWnZf1VIrmrA,184 +deepspeed/inference/v2/modules/implementations/pre_norm/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/pre_norm/__pycache__/cuda_pre_ln.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/pre_norm/__pycache__/cuda_pre_rms.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/pre_norm/cuda_pre_ln.py,sha256=VvFlm3Xc6U7VVyfXnFUeA5tsLw4TLhK_LVlKJkN4SmY,2744 
+deepspeed/inference/v2/modules/implementations/pre_norm/cuda_pre_rms.py,sha256=WxZeP0B1ghhpOiMIHig4vb0ou0q7jkiLZFfbbdREXtQ,3245 +deepspeed/inference/v2/modules/implementations/unembed/__init__.py,sha256=ve_166eLGPGfIncTIvEvqoFGKi2pvtgvSLVVvTPXpHQ,140 +deepspeed/inference/v2/modules/implementations/unembed/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/unembed/__pycache__/ragged_unembed.cpython-310.pyc,, +deepspeed/inference/v2/modules/implementations/unembed/ragged_unembed.py,sha256=cAd3By6SBykoCHJI3oxwE0FXHcMP5HEF5CSrWJyBjjg,4921 +deepspeed/inference/v2/modules/interfaces/__init__.py,sha256=2djwkw0m2KAAuBV-LuWY0W0ncwIrCxDX5uwf4oc8E-U,519 +deepspeed/inference/v2/modules/interfaces/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/modules/interfaces/__pycache__/attention_base.cpython-310.pyc,, +deepspeed/inference/v2/modules/interfaces/__pycache__/embedding_base.cpython-310.pyc,, +deepspeed/inference/v2/modules/interfaces/__pycache__/linear_base.cpython-310.pyc,, +deepspeed/inference/v2/modules/interfaces/__pycache__/moe_base.cpython-310.pyc,, +deepspeed/inference/v2/modules/interfaces/__pycache__/post_norm_base.cpython-310.pyc,, +deepspeed/inference/v2/modules/interfaces/__pycache__/pre_norm_base.cpython-310.pyc,, +deepspeed/inference/v2/modules/interfaces/__pycache__/unembed_base.cpython-310.pyc,, +deepspeed/inference/v2/modules/interfaces/attention_base.py,sha256=Kl8aPbpzyhP4bF21LZ_OcEwHFoI1KFirOiukiV4YajE,3613 +deepspeed/inference/v2/modules/interfaces/embedding_base.py,sha256=3pXB9F4IlWMqFGV8ZYE3k3bG2Je4B9uvVnVS0pRzLeU,3071 +deepspeed/inference/v2/modules/interfaces/linear_base.py,sha256=8g8or7xUMtGX_3uQj6zt0YfUBS9SwSbRY02lrr3gUVE,2146 +deepspeed/inference/v2/modules/interfaces/moe_base.py,sha256=5BCoKWZplUfxMj7mOPA58b4aBGk7APM-WG-ByxUIUyA,2850 +deepspeed/inference/v2/modules/interfaces/post_norm_base.py,sha256=PktjhFD5Pi3y2zt_VEwB9L-yNVyIo4dHpJgdVwgbcVc,2123 
+deepspeed/inference/v2/modules/interfaces/pre_norm_base.py,sha256=QlYoS3N4N3ZKfUJk6C9u_vCT10p4-ACVgAEydLquHnY,2106 +deepspeed/inference/v2/modules/interfaces/unembed_base.py,sha256=W5ToOG46DZwH5e1VnFXxXp_f9-EvnO2iIw6zqZkL38w,2055 +deepspeed/inference/v2/modules/module_registry.py,sha256=UyDd-5FYQwbEJ7ihPZ60GkEiQ7M6Z2XMWnEAaLZN_WQ,2073 +deepspeed/inference/v2/ragged/__init__.py,sha256=2fFLZ35zp9WgcDaQ3XaTti8Hzi99zt8EgueCaaOqZs4,418 +deepspeed/inference/v2/ragged/__pycache__/__init__.cpython-310.pyc,, +deepspeed/inference/v2/ragged/__pycache__/blocked_allocator.cpython-310.pyc,, +deepspeed/inference/v2/ragged/__pycache__/kv_cache.cpython-310.pyc,, +deepspeed/inference/v2/ragged/__pycache__/manager_configs.cpython-310.pyc,, +deepspeed/inference/v2/ragged/__pycache__/ragged_manager.cpython-310.pyc,, +deepspeed/inference/v2/ragged/__pycache__/ragged_wrapper.cpython-310.pyc,, +deepspeed/inference/v2/ragged/__pycache__/sequence_descriptor.cpython-310.pyc,, +deepspeed/inference/v2/ragged/blocked_allocator.py,sha256=8rwqli63wrLAbzOuLoOgVqLi5UlpPlyxXtyRS1gU6zM,3661 +deepspeed/inference/v2/ragged/csrc/fast_host_buffer.cu,sha256=8U10U6VZk12oGwXCniQsqduta_cEcVBFuN9Fq3LO1JQ,501 +deepspeed/inference/v2/ragged/csrc/ragged_ops.cpp,sha256=VKEN3FTL2T2r-UKg6ARuqW5ToAHQkfzql7F7nVL2dEw,2726 +deepspeed/inference/v2/ragged/includes/fast_host_buffer.h,sha256=N73g7w1uE1gEsyWzrEnEibGeU8nBJPDeL-SWJEWK_0M,302 +deepspeed/inference/v2/ragged/kv_cache.py,sha256=eekwPz2OMNgQJHYXcxIrW86yK0guS419GeNqXJ38wIg,8559 +deepspeed/inference/v2/ragged/manager_configs.py,sha256=pRSnmVJKxbaiSNrzZIHKsnJ50lMSLufYwXFhMTmk6VU,6077 +deepspeed/inference/v2/ragged/ragged_manager.py,sha256=Mk09dkqkEj5CMA9kWvTWuQeiw7fWiO9xn_nMIs-1Hho,7334 +deepspeed/inference/v2/ragged/ragged_wrapper.py,sha256=RJJpqSVz6vsElbtfePx99TDYZ2Q_Hao7qt_2Jt6RP2s,12701 +deepspeed/inference/v2/ragged/sequence_descriptor.py,sha256=oo1kf0upitTJkbZB6ME7GY3_MoOnGABXcw1coq49ljM,10891 
+deepspeed/inference/v2/scheduling_utils.py,sha256=S1MPmx8zU8qHmDd_eDLhKP6hh71N9lo_Q8kJxO1AyOY,1363 +deepspeed/launcher/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/launcher/__pycache__/__init__.cpython-310.pyc,, +deepspeed/launcher/__pycache__/constants.cpython-310.pyc,, +deepspeed/launcher/__pycache__/launch.cpython-310.pyc,, +deepspeed/launcher/__pycache__/launcher_helper.cpython-310.pyc,, +deepspeed/launcher/__pycache__/multinode_runner.cpython-310.pyc,, +deepspeed/launcher/__pycache__/runner.cpython-310.pyc,, +deepspeed/launcher/constants.py,sha256=X-6bI3aC4NXSZgrMJ6LaIILrsWu_TNteNBwgKOhNrgI,375 +deepspeed/launcher/launch.py,sha256=ezb11bDqxZOBEP70jw8EgMT4hl-sU8-AubGrq83KyUA,14777 +deepspeed/launcher/launcher_helper.py,sha256=tpvx6ItaLbBNYnUwP8XoKcn8P0mFXRffpB1Z_uLB_U0,3931 +deepspeed/launcher/multinode_runner.py,sha256=37Q0CR_WlOS9Ta9tFePYsugjz9mSZrQkVXU4WQRKdoI,17023 +deepspeed/launcher/runner.py,sha256=r6OkPOy_8yw9YnY_TZw3_T0b5ZzUr3uGFbAsIohwAXU,24713 +deepspeed/model_implementations/__init__.py,sha256=jJCJcnbvmFc9eQxIUIb7Gd6Ysd8ijSuuk-aaqFrDkK4,220 +deepspeed/model_implementations/__pycache__/__init__.cpython-310.pyc,, +deepspeed/model_implementations/diffusers/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/model_implementations/diffusers/__pycache__/__init__.cpython-310.pyc,, +deepspeed/model_implementations/diffusers/__pycache__/unet.cpython-310.pyc,, +deepspeed/model_implementations/diffusers/__pycache__/vae.cpython-310.pyc,, +deepspeed/model_implementations/diffusers/unet.py,sha256=W2_9-c-5Tz-nymu7kK2du2rzpV8-eC8JZEpQGRhGMek,3056 +deepspeed/model_implementations/diffusers/vae.py,sha256=xbjGaYsXeJA4nu49gQaGO5GrEazCHwcTUVqPsxoOTn4,6244 +deepspeed/model_implementations/features/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/model_implementations/features/__pycache__/__init__.cpython-310.pyc,, 
+deepspeed/model_implementations/features/__pycache__/cuda_graph.cpython-310.pyc,, +deepspeed/model_implementations/features/cuda_graph.py,sha256=-KgILcht5qw_ayzI0CqSPGGFoqZKHG8E5CTfD8ACgw8,563 +deepspeed/model_implementations/transformers/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/model_implementations/transformers/__pycache__/__init__.cpython-310.pyc,, +deepspeed/model_implementations/transformers/__pycache__/clip_encoder.cpython-310.pyc,, +deepspeed/model_implementations/transformers/__pycache__/ds_base.cpython-310.pyc,, +deepspeed/model_implementations/transformers/__pycache__/ds_bert.cpython-310.pyc,, +deepspeed/model_implementations/transformers/__pycache__/ds_bloom.cpython-310.pyc,, +deepspeed/model_implementations/transformers/__pycache__/ds_gpt.cpython-310.pyc,, +deepspeed/model_implementations/transformers/__pycache__/ds_llama2.cpython-310.pyc,, +deepspeed/model_implementations/transformers/__pycache__/ds_megatron_gpt.cpython-310.pyc,, +deepspeed/model_implementations/transformers/__pycache__/ds_opt.cpython-310.pyc,, +deepspeed/model_implementations/transformers/__pycache__/ds_transformer.cpython-310.pyc,, +deepspeed/model_implementations/transformers/clip_encoder.py,sha256=BTc_do6u4A79uhoNwQYxKrbqSnk2C63BO6xN0Ez-fz0,3096 +deepspeed/model_implementations/transformers/ds_base.py,sha256=feBwoCtVctmdYb_KZruLVCJr7WUBsfNZtVUxEKtO5MQ,388 +deepspeed/model_implementations/transformers/ds_bert.py,sha256=fckfXCn3zf9DsvqiyVAb0pwn25GA9H4ZceaCI4gLLSI,667 +deepspeed/model_implementations/transformers/ds_bloom.py,sha256=_YHlkBOUBTT27ZRWs7x6DxKz_5gILGBAe2YNmlkCij0,669 +deepspeed/model_implementations/transformers/ds_gpt.py,sha256=PkhQU0iHFkSGRxaEb_j1UDvRWruwrOPF8MLZfAo55ZY,665 +deepspeed/model_implementations/transformers/ds_llama2.py,sha256=6rAOou4WaViFUbjsf9nOS35vMxl9Hqe5BXpL6Vce-HA,2792 +deepspeed/model_implementations/transformers/ds_megatron_gpt.py,sha256=kVtyp2Uss9Z_MRGwgAvJI_2tfWlPgzMpywIPS0xeOIo,682 
+deepspeed/model_implementations/transformers/ds_opt.py,sha256=oHS210T27R5zOtg8ik_QwV22F8BIJxKVWPiPQaDXzjY,665 +deepspeed/model_implementations/transformers/ds_transformer.py,sha256=VlDZgrp0PMx9pz5DrpO56m2s73dY4WuAYUJAcN5gtGU,8923 +deepspeed/module_inject/__init__.py,sha256=lEgBDKuhccVUSVhcBdqZU9dCUXzxTrFySQ-E4dYbVNw,444 +deepspeed/module_inject/__pycache__/__init__.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/auto_tp.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/auto_tp_model_utils.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/fusedqkv_utils.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/inject.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/layers.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/load_checkpoint.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/module_quantize.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/policy.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/replace_module.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/replace_policy.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/tp_shard.cpython-310.pyc,, +deepspeed/module_inject/__pycache__/utils.cpython-310.pyc,, +deepspeed/module_inject/auto_tp.py,sha256=9hl8eL7ccoiq7nv36qUR6GsbRm4sxRef84sSKFVW3cw,24253 +deepspeed/module_inject/auto_tp_model_utils.py,sha256=G_tPVAaT7b422Wa8J785rEOcM742uXnoW-5fvd1cinU,5910 +deepspeed/module_inject/containers/__init__.py,sha256=AQbuSXpxEyLpLN3RoAkQeyvtioX_8ShH6C8cqzcQ2Jw,1015 +deepspeed/module_inject/containers/__pycache__/__init__.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/base.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/base_moe.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/bert.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/bloom.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/clip.cpython-310.pyc,, 
+deepspeed/module_inject/containers/__pycache__/distil_bert.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/gpt2.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/gptj.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/gptneo.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/gptneox.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/internlm.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/llama.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/llama2.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/megatron_gpt.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/megatron_gpt_moe.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/opt.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/unet.cpython-310.pyc,, +deepspeed/module_inject/containers/__pycache__/vae.cpython-310.pyc,, +deepspeed/module_inject/containers/base.py,sha256=kw-culwCQ480zVCgeY7K9LH30BSuKWuPtm2lBMh-L_o,13762 +deepspeed/module_inject/containers/base_moe.py,sha256=lfyM4AhAXmYp5dxa60XoEbcJDQVj9uB-pRlzIDR2ypI,5756 +deepspeed/module_inject/containers/bert.py,sha256=f0VMB8R5mCHxNgIVhQSYeTgVsxo1IN4BV6vzSIOgLZc,3769 +deepspeed/module_inject/containers/bloom.py,sha256=dXSni0GSc3FOL3fyjZWMVGbn2MqW_12mqKZt7NFWZyg,5615 +deepspeed/module_inject/containers/clip.py,sha256=KR3s7ev3v-ccvrFhqfm2pKjApQeYeCfhm4aZZ0_Ou_A,2822 +deepspeed/module_inject/containers/distil_bert.py,sha256=j8ufi0mXYe862fLYMtY1J0Rh2uY-xzkXKDKrhK-EsgI,3188 +deepspeed/module_inject/containers/features/__init__.py,sha256=GOivP3ix552VHgGPZ70UehyLQ_YZ8TLRc0-rgTCO07c,275 +deepspeed/module_inject/containers/features/__pycache__/__init__.cpython-310.pyc,, +deepspeed/module_inject/containers/features/__pycache__/gated_mlp.cpython-310.pyc,, +deepspeed/module_inject/containers/features/__pycache__/hybrid_engine.cpython-310.pyc,, 
+deepspeed/module_inject/containers/features/__pycache__/hybrid_megatron.cpython-310.pyc,, +deepspeed/module_inject/containers/features/__pycache__/megatron.cpython-310.pyc,, +deepspeed/module_inject/containers/features/__pycache__/meta_tensor.cpython-310.pyc,, +deepspeed/module_inject/containers/features/__pycache__/split_qkv.cpython-310.pyc,, +deepspeed/module_inject/containers/features/gated_mlp.py,sha256=ep8fnRs-06C7oL75w5UBAaLyCb9BwAx2v3c-sBhkB2k,5397 +deepspeed/module_inject/containers/features/hybrid_engine.py,sha256=ki_k_0SstM39XfN0SNWwiSO3qzIBs2qKxxAyrGxpfUo,8374 +deepspeed/module_inject/containers/features/hybrid_megatron.py,sha256=qp04fT0ManTJwzVdABFP6OWvLQsD6pWDaJAxNJU4-ZQ,4129 +deepspeed/module_inject/containers/features/megatron.py,sha256=BrbmtBC9ZKFL4ESbIYRJLzoEnEp3QOhkj1VcoqlcEek,1200 +deepspeed/module_inject/containers/features/meta_tensor.py,sha256=F0Zqh5vyp3fmJM8XAbVGmg4PwU4EjNcKT_27qSSCiTE,2926 +deepspeed/module_inject/containers/features/split_qkv.py,sha256=totYVBg1M6DcsdC3762Vhoq7gpGJ1BIioTfDFdFwkF0,7136 +deepspeed/module_inject/containers/gpt2.py,sha256=98yUKFAGrfyyCv5yK7V4Zr8oWf6m4qTXgGIzUbRG0kM,2221 +deepspeed/module_inject/containers/gptj.py,sha256=OJ4Z-G5sjd-92yrNDJ0D53Jz_o0n2tfkg9fS9Mll1ss,5074 +deepspeed/module_inject/containers/gptneo.py,sha256=NakhEgR8qh-U9rZ6JD6nIyY_r1mH2obkrgLat4eKpMo,5790 +deepspeed/module_inject/containers/gptneox.py,sha256=zM8hYq0e1FKrDd0n0m1oUPKKMDNpwxVmXQHm8jlv1CA,5897 +deepspeed/module_inject/containers/internlm.py,sha256=3JYddGlL-87jC35MSKvYw9XzZFHYIIYC4Un0Ek5jsL4,7761 +deepspeed/module_inject/containers/llama.py,sha256=aQp5C7cd87xQScxVTbbo5Lzy0oplVtz3uCma1ptw8_8,6740 +deepspeed/module_inject/containers/llama2.py,sha256=5WbtiVVAL1dVseHiO5kvx2qOwVAAvq-cKBKo0JZ7YjA,6359 +deepspeed/module_inject/containers/megatron_gpt.py,sha256=PiD58-Ul61ZJE4QAkCKCtGwjg44wMaSPOkqnWcRudyo,5417 +deepspeed/module_inject/containers/megatron_gpt_moe.py,sha256=xFOff3OOIKMz2YAFRrqLHxpkzLgB-chA4L69HXiey98,3936 
+deepspeed/module_inject/containers/opt.py,sha256=exIuEYIu-KCjwPL3L-ykz1ud477q-F4WBwbXIL0Hef4,6905 +deepspeed/module_inject/containers/unet.py,sha256=fWteUHNx0S8u6C3xpiU2TLK4snlTr7Bv81yCNlVIDY8,1862 +deepspeed/module_inject/containers/vae.py,sha256=yLb5XWwZcaVOXVbUjGW1n0hUr3wrL4rqzqFkvBbD1Y4,1505 +deepspeed/module_inject/fusedqkv_utils.py,sha256=ZlbeQdq8UqdNrfe1NQ-PnIYLXpiNm0BzvE1kGeJKFPw,5553 +deepspeed/module_inject/inject.py,sha256=KkpC_LLybQeiFzIhr6pz-OlKOl92XNFf3Vh7GqD7jsY,4719 +deepspeed/module_inject/layers.py,sha256=L6TVH7Ao7qzWDCL3h5tLZrDcWVQvqdB8YZrv1bgoDTk,5618 +deepspeed/module_inject/load_checkpoint.py,sha256=f2Usdz9F7UTx1eTQHfUL1ZdaNHlNO5s3YHZxveZpFOU,15294 +deepspeed/module_inject/module_quantize.py,sha256=NrOwvSfPWWOnlUVe82sSOZhay76JzqkgZ-j4NP-yEOY,3144 +deepspeed/module_inject/policy.py,sha256=kfyIzx4R4INZve94RPBPZu0jnnQKYpWtou-nRvZQ0Z0,8259 +deepspeed/module_inject/replace_module.py,sha256=HWD80PNrGl_CnMf-EdAd7DqkXfSm4PURV3sa30neLpg,30642 +deepspeed/module_inject/replace_policy.py,sha256=jlIhG7pbXcjD8JPjWJCU-Gs_WSTG_q7K-IHmewu04pw,1119 +deepspeed/module_inject/tp_shard.py,sha256=X0yC-u09MWosMkcLJVAIVcj09GrFD8Smpr2qZ5-1gO0,1472 +deepspeed/module_inject/utils.py,sha256=q5gZWY7YK_-BI7ce16-G8dvU9Qgx7Qbezz9Hwz7oKS0,1995 +deepspeed/moe/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/moe/__pycache__/__init__.cpython-310.pyc,, +deepspeed/moe/__pycache__/experts.cpython-310.pyc,, +deepspeed/moe/__pycache__/layer.cpython-310.pyc,, +deepspeed/moe/__pycache__/mappings.cpython-310.pyc,, +deepspeed/moe/__pycache__/sharded_moe.cpython-310.pyc,, +deepspeed/moe/__pycache__/utils.cpython-310.pyc,, +deepspeed/moe/experts.py,sha256=7xFMeuKcBmfyVdaVKpUw5uyGzHYbtxXTiKqcUuBjrQ8,1315 +deepspeed/moe/layer.py,sha256=Wf2r5tp0W2-nf0zXT8x2vEaCaYKXwFFXU9Ybb6N06wE,6899 +deepspeed/moe/mappings.py,sha256=sgjH5Ix45XBeygDuVZWp1ieklzVHrjem5tWTGhP0uHQ,3529 +deepspeed/moe/sharded_moe.py,sha256=x_v7nMw35F77BOIxQorIdFZ6DUaVl7jxn7bPDcJCrSc,22201 
+deepspeed/moe/utils.py,sha256=XinKyQ-nbFCflOjRBFGEHRHwrdntnnEdn0gP-pos9Eg,5135 +deepspeed/monitor/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/monitor/__pycache__/__init__.cpython-310.pyc,, +deepspeed/monitor/__pycache__/config.cpython-310.pyc,, +deepspeed/monitor/__pycache__/csv_monitor.cpython-310.pyc,, +deepspeed/monitor/__pycache__/monitor.cpython-310.pyc,, +deepspeed/monitor/__pycache__/tensorboard.cpython-310.pyc,, +deepspeed/monitor/__pycache__/utils.cpython-310.pyc,, +deepspeed/monitor/__pycache__/wandb.cpython-310.pyc,, +deepspeed/monitor/config.py,sha256=pMEaW2VCbZFtqVmAy567HOzbgdhEftSGtCKo_Osh9DI,2498 +deepspeed/monitor/csv_monitor.py,sha256=E9bWWx7QKhex8GWOxMOLjOsrAyBmXqCkaxNUyVirSY0,2907 +deepspeed/monitor/monitor.py,sha256=hGvYlT3zTG7OTdxUT8AvPOxahvq074eWYoDEYxMT1PM,1604 +deepspeed/monitor/tensorboard.py,sha256=9crE_YD9adHS65QmRxAuSuezRQUpL_VHrJMZmPhmuiE,2227 +deepspeed/monitor/utils.py,sha256=mUyOH9IYfzaogOYdnaxFEVBW1ySuUBFiOOKu_p_F09U,784 +deepspeed/monitor/wandb.py,sha256=DgdWuMEkJXDiTS4ZAWz_UaOP8WFSUOjhayUTl8q3DhU,1150 +deepspeed/nebula/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/nebula/__pycache__/__init__.cpython-310.pyc,, +deepspeed/nebula/__pycache__/config.cpython-310.pyc,, +deepspeed/nebula/__pycache__/constants.cpython-310.pyc,, +deepspeed/nebula/config.py,sha256=THO1Mwto2utxnBWRfn0vIVWevOpT2EPFJFZkDCJF-ck,1764 +deepspeed/nebula/constants.py,sha256=0HQOkViV_lRv13vtczIhBJG_YAH876vhh0R4BqHAj68,2786 +deepspeed/ops/__init__.py,sha256=Ki05e_FpirSpYq2Tv9ZYmrMK7bgDDLF3cV9CL8KNSSE,496 +deepspeed/ops/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/adagrad/__init__.py,sha256=aJHYZSC21yulH09u72ZYAe43nLgeMemqvmC9wNNh9CQ,141 +deepspeed/ops/adagrad/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/adagrad/__pycache__/cpu_adagrad.cpython-310.pyc,, +deepspeed/ops/adagrad/cpu_adagrad.py,sha256=6C4WoAPbLe7esT8Ite1YDgpk-qtFo1bYfvZv9ybKFIs,5089 
+deepspeed/ops/adam/__init__.py,sha256=ZVagnjkzHVw4akcDibUCfsArTcu8J9wsz9xKmbrJnIs,169 +deepspeed/ops/adam/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/adam/__pycache__/cpu_adam.cpython-310.pyc,, +deepspeed/ops/adam/__pycache__/fused_adam.cpython-310.pyc,, +deepspeed/ops/adam/__pycache__/multi_tensor_apply.cpython-310.pyc,, +deepspeed/ops/adam/cpu_adam.py,sha256=pRJKMXtrRfnNfIKAksAlDHWhBFYcmYESxdkO28jHPMw,8534 +deepspeed/ops/adam/fused_adam.py,sha256=5_JBk_UvBFcnOL0_hPr5SVFATmmmKHFIlVSR1am-sBc,8767 +deepspeed/ops/adam/multi_tensor_apply.py,sha256=APt3UCnfw-nLjJOGWXC3izWRXNu-16TYhB-s213DpM0,429 +deepspeed/ops/aio/__init__.py,sha256=dHc8QXzdN4Cw0D7px-gbO_fzV-lKT21ySZJ67LtUJuI,136 +deepspeed/ops/aio/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/csrc/adagrad/cpu_adagrad.cpp,sha256=EzyqrGziDv2r76SxEtVNzIpp_V-jKFgq1lThAC5Mm5k,9108 +deepspeed/ops/csrc/adam/cpu_adam.cpp,sha256=bDny-11rq18NQoRXkW62zQYTnCCCoqiXPTot3TeK3fI,535 +deepspeed/ops/csrc/adam/cpu_adam_impl.cpp,sha256=4kvJolXsOi6DnsYuS8J2x9_hEGhgaSiJ2aUjZO8DNPQ,10655 +deepspeed/ops/csrc/adam/fused_adam_frontend.cpp,sha256=tu6pBcAvqcdh1ti6SB7fj8HnqFDQg3Rqbaa4xsQ2v7M,880 +deepspeed/ops/csrc/adam/multi_tensor_adam.cu,sha256=tA2gAWdXNZLjA8nQylJ0hXKjDUQIMgVR3VbQPtpj5WA,6579 +deepspeed/ops/csrc/adam/multi_tensor_apply.cuh,sha256=tMXW3UwsOhNM4J1axQbNkn4gNfmig7_vXPiBLdFCCv4,5595 +deepspeed/ops/csrc/aio/common/deepspeed_aio_common.cpp,sha256=UMllIX03ZJA7Hv0sVyp2MLGdE85cuuI8EWAws09I8as,13406 +deepspeed/ops/csrc/aio/common/deepspeed_aio_common.h,sha256=otOH-e8m43YmLoATqTDZy8fI9MzHpM_cr4PIoA8by-A,1364 +deepspeed/ops/csrc/aio/common/deepspeed_aio_types.cpp,sha256=vrR8UQ9EK-8EgHBUyQIMgPLxNSFtwNUQVeoClggfPnQ,2033 +deepspeed/ops/csrc/aio/common/deepspeed_aio_types.h,sha256=pnFoIKThq3P_bHoo-jxZgL86SMbXlJZGfpghK7pQqBg,1402 +deepspeed/ops/csrc/aio/common/deepspeed_aio_utils.cpp,sha256=sT1zpUXoVe7v1hQS1BIfFqRFobBg22yfw98YW7UNrUo,4330 
+deepspeed/ops/csrc/aio/common/deepspeed_aio_utils.h,sha256=8t9Sj0hxme2D9lYkkelZLC6H1Vq-8d1GxDIWX98OGhM,2086 +deepspeed/ops/csrc/aio/py_lib/deepspeed_aio_thread.cpp,sha256=NdbibH_mRTBEASvkHn96ZVcI99cnzEuKhejWXwBzzXg,3316 +deepspeed/ops/csrc/aio/py_lib/deepspeed_aio_thread.h,sha256=EQJggDpuTyIk-RNw4vPzxtT3U67dx9gUa_5l8a03ygQ,1427 +deepspeed/ops/csrc/aio/py_lib/deepspeed_pin_tensor.cpp,sha256=dYGOAYk1mZRNwYoc_ARUm052Hpj7nBTL3Ve3SXLsIQk,1240 +deepspeed/ops/csrc/aio/py_lib/deepspeed_pin_tensor.h,sha256=j5mJ2HevYsjg6jCmxvn1Sg8-TnvnMTOa8f9q0QYKWvw,722 +deepspeed/ops/csrc/aio/py_lib/deepspeed_py_aio.cpp,sha256=F9agfNs8bIEMUcirzAL_q5_Jv-Xqd-H9FKfcOk3GTaM,4326 +deepspeed/ops/csrc/aio/py_lib/deepspeed_py_aio.h,sha256=Bk5OV7sf5C410s9mi_zUOdPWw9i2tSVCYUpKwbhsWxM,1052 +deepspeed/ops/csrc/aio/py_lib/deepspeed_py_aio_handle.cpp,sha256=Nh0tz4VhJ1LfofDtcMPzLlsr6KK20qp8ePpD6Voftvw,10113 +deepspeed/ops/csrc/aio/py_lib/deepspeed_py_aio_handle.h,sha256=ePRbzA9PjWRDjT1SbMTzAhx8UX5AYmNml7IeXPq6bBQ,2459 +deepspeed/ops/csrc/aio/py_lib/deepspeed_py_copy.cpp,sha256=d2aL7n0MFzFbssjz85YTcnAHA0l2rn4EPpDhdk5FISw,4422 +deepspeed/ops/csrc/aio/py_lib/deepspeed_py_copy.h,sha256=wKacjqIxo_umXV-8ySqieZLMtJHQNkbuGA8JG-gCzOI,1246 +deepspeed/ops/csrc/aio/py_lib/py_ds_aio.cpp,sha256=6qeD9qdFq7b9hZIVODHWRtwZpG5purlsl8P1WoZEsbU,1805 +deepspeed/ops/csrc/aio/py_test/single_process_config.json,sha256=onGthM2MLfZLjHehrPhjnNs-t3bXR_qXHWJ2wc6m1FE,359 +deepspeed/ops/csrc/common/custom_cuda_kernel.cu,sha256=Y0uyZz5CfsWf2_aOCDeJM8-4r9IPwn_iskqJs5s0tDA,1278 +deepspeed/ops/csrc/cpu/adam/fused_adam.cpp,sha256=m45BQnPCTkrYHiYcdKNa791zqVqtbdwia4TFoXjnOK0,1433 +deepspeed/ops/csrc/cpu/comm/ccl.cpp,sha256=Q7dj8TxG-764jAqLRxPswnRtLQrikkx4k0luAYTHQlg,23986 +deepspeed/ops/csrc/cpu/lion/fused_lion.cpp,sha256=-_QUZhwjkjsePW-YyZ4ishpUPCGmbgfDp4C2jNgRIA0,1230 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/attention.cpp,sha256=1d5msQb6IiUdDeAYpzsiKjFHEFPHNAvBXDkO-aY2W_g,2095 
+deepspeed/ops/csrc/deepspeed4science/evoformer_attn/attention_back.cu,sha256=10fKkFtrljPwiHjIAhoZJrBEcIRjdBP8JDagnFdWyOU,9971 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/attention_cu.cu,sha256=lfsGocib3dneilmyTKnEL9fdmALP8v_UhvwApmwgpVM,7509 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/epilogue/epilogue_grad_bias.h,sha256=ziZZar1z41wsr6ia5HtX8aAjpAuomtq5cCgZMzYylWI,12185 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/epilogue/epilogue_pipelined.h,sha256=RvYmxAW4NN7kMc9pLfEmkXRO-nXMKR1BX3fhhRUG3SI,24898 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/epilogue/epilogue_rescale_output.h,sha256=WWnsDQeR-XHcjKeEZixxhaTkv89tpyc1jaXoSAEiWt8,9606 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/epilogue/epilogue_thread_apply_logsumexp.h,sha256=Q_QP5LK3AwDtt6GHxLdUg-ttVqtDkxN2gv4bXiDnlPw,6392 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/gemm/custom_mma.h,sha256=rXA6J4g_TuQ1Ufm2c0Q2dMcuC1FV6X4MpkYpk7TeYp0,6270 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/gemm/custom_mma_base.h,sha256=zn3uylR7vb9GNLKG3UjodjXyPO_XpIuK0bRBwILm4zo,6511 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/gemm/custom_mma_multistage.h,sha256=WeVgJF-IIsVjd50TwDsL_oKwr2UMYUWdLEdhOiy_bfE,30281 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/gemm/custom_mma_pipelined.h,sha256=7MyvAD-eXwtNkfyjtd459IvbAbhW81Ys1RzFN8TasTM,15770 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/gemm/find_default_mma.h,sha256=l0wa_nFB9Vrz11bR_0lH-D4vaK-sWfuHm31Mg_Em3bc,9420 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/gemm/mma_accum_lambda_iterator.h,sha256=G6LeFpLrmBPVVsnfiYCPjh0h4VdJ5su6jyYPLvPymqI,16280 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/gemm/mma_from_smem.h,sha256=o_6L3rJlRtGmughlt9g0xLCzvTqBbUObl72lNxkbhNY,84216 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/gemm_kernel_utils.h,sha256=Evs8pB0tze8xOovv8Yv4pqBwxe-qxcncMkNztI1OuS4,11615 
+deepspeed/ops/csrc/deepspeed4science/evoformer_attn/iterators/epilogue_predicated_tile_iterator.h,sha256=Vl83Qgukp3lN0EwAkZdftc1gxIfvwvFzjRw7M8wSQ9A,27084 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/iterators/make_residual_last.h,sha256=bl7iRKlZYnjqYwoST7SRHWwqygjsvYCUmLMBfbAxV7Q,4245 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/iterators/predicated_tile_access_iterator_residual_last.h,sha256=Jg4qjZ0ukBy2ZIZ-2wxZ9u4mrIcdVAcil56e7CdCY8M,71866 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/iterators/predicated_tile_iterator_atomic.h,sha256=RQ1sW5cWwiPveRxIs8rVuOgaXBBhsgHU5mbiLh6rawc,31326 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/iterators/predicated_tile_iterator_residual_last.h,sha256=aZy_jvL9zakb9oeFXkoihOabhLLhZjRSbLkf_6lUwS8,71626 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/iterators/transpose_warp_iterator.h,sha256=PtMp-E5HhdBmwgRiEbE2ZjlVL-b97euA80waCNO7jYQ,2536 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/iterators/warp_iterator_from_smem.h,sha256=QvDXwcGimdRqSoaGSzN5aS3q37VSXeihlU_A6ZPee9M,10125 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/kernel_backward.h,sha256=lLlud-SKkjuOa9UcJkOUfSZRV3RQpPRf9J6qMtAODBQ,95042 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/kernel_forward.h,sha256=CTv6YV9cCkmzU7w49AqRhENGWOccHYz0d4YzmRhvVBM,47657 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/transform/bias_broadcast.h,sha256=7yLAnvgViEcY5qls5eEJyfQDNkToG0TfPKQhm8Qi0-E,5952 +deepspeed/ops/csrc/deepspeed4science/evoformer_attn/transform/tile_smem_loader.h,sha256=Dr_uSpu3dH8GAfChWggDeon9rn7F4G8BFrjm5_Xyt0E,4476 +deepspeed/ops/csrc/includes/StopWatch.h,sha256=DocXNohnOPWiZBqfrdI6AKV9IggYRB1Bylzxfke_i5s,1981 +deepspeed/ops/csrc/includes/Timer.h,sha256=4mYxcATDJElSy5GCj_xFDZ2AvHHmMrhJlW0jJbMP7nQ,1180 +deepspeed/ops/csrc/includes/activation_type.h,sha256=WkFfkL5mgLyYAZimLUouThCkyA2aiF9LQlh1q206L9o,264 +deepspeed/ops/csrc/includes/compat.h,sha256=5ivuTerIqiCaYu2HpRPzJcUtqMsAtRB7aOXel69HeZQ,336 
+deepspeed/ops/csrc/includes/context.h,sha256=Cs6hRXBiFSjpKKQOAMCX9F0uP_Hh1IeZLloNJxHxk1E,7207 +deepspeed/ops/csrc/includes/conversion_utils.h,sha256=augq8Zf-t46MaxWh0ryFb46_Guxud0k4kIK-h93rI8Y,12379 +deepspeed/ops/csrc/includes/cpu_adagrad.h,sha256=A95TmBq4sB7R45pVc4M5URd_n-KRRQ1ScWROqeUOra8,7091 +deepspeed/ops/csrc/includes/cpu_adam.h,sha256=Surj-3wGenseYkuDwz7FAIU79rt1AqebP1tF0f-liFM,11096 +deepspeed/ops/csrc/includes/cpu_lion.h,sha256=zhz1VmhzZ7y54SD5slN-nPC71onAJ5sqnJ2-2lw7mWw,9130 +deepspeed/ops/csrc/includes/cublas_wrappers.h,sha256=7X3R1jZFKpnzWUAqdns8FxMAqIHWes1X9nWRwiGpDlY,3341 +deepspeed/ops/csrc/includes/custom_cuda_layers.h,sha256=XRAzBW3TP0T74eFm_yEn9-JracTbiEB1vuPtJ6YbxpA,13156 +deepspeed/ops/csrc/includes/dequantization_utils.h,sha256=uFpPmBIYd3gNkKXImQLidYnGDYYTXkpY9V4o-AkdYpk,7301 +deepspeed/ops/csrc/includes/dropout.h,sha256=l5JuSzyfxRsfufBKIebPwpghnmYR_E6ucCmGbLvZ1qc,2195 +deepspeed/ops/csrc/includes/ds_kernel_utils.h,sha256=SNb_fMME8CA5YI1ws4kCW99jR4S2ccQqIgTA7ymmttQ,1280 +deepspeed/ops/csrc/includes/ds_transformer_cuda.h,sha256=OwVCwN4tS1kMEeF9J6QV9GG8AvtYklaHRJrMPmji-zY,6164 +deepspeed/ops/csrc/includes/feed_forward.h,sha256=IgzSMREF1do6A8pvcJfICvM5ShN2AB7zCoj5w3p9wxw,3189 +deepspeed/ops/csrc/includes/gelu.h,sha256=iCsuAdNFzhLeccvc5D7KnsAcOcvXN_QD4jlaFRPF340,1018 +deepspeed/ops/csrc/includes/gemm_test.h,sha256=HzdC86ZxaCF6FvedU4ZFb77rbYvQkYVfW-5vkvsPZWI,10390 +deepspeed/ops/csrc/includes/general_kernels.h,sha256=0ksHr78VwjPiAfYPAloHM5uMxriRfNPEZyk9OklLhhI,1507 +deepspeed/ops/csrc/includes/memory_access_utils.h,sha256=wqpAqnfmSD5Fx15qwsnY63VCL6tYyJaChCqLxakM10w,33966 +deepspeed/ops/csrc/includes/normalize_layer.h,sha256=j2lJkrxENFILqdUpEZIMj1qGrFty5SNoAeUrifThvSI,7089 +deepspeed/ops/csrc/includes/quantization.h,sha256=tCPid1R06_cSWtsmq1dEjuIsLn70MCQqPzj01F04vFw,4380 +deepspeed/ops/csrc/includes/quantization_utils.h,sha256=_pa6UHLgvDlXgVOfBgby1fHDi-IPORXwdvBX5rbGz88,17370 
+deepspeed/ops/csrc/includes/quantizer.h,sha256=YIRNwwr1eZYMmXbs6AZT82shpXuWbn94P4OVMcA3tss,346 +deepspeed/ops/csrc/includes/reduction_utils.h,sha256=189h93WF77CblgWonfW5eQqoTeJnb-Rt2Ov2fZ6mO-E,22352 +deepspeed/ops/csrc/includes/simd.h,sha256=vnHiRltNIBRAEqAjv3DJbKhFNA6PE1-dPlK0OFhS7Iw,6601 +deepspeed/ops/csrc/includes/softmax.h,sha256=UGZHziAfbGN6OdTIA24G5CWHhV8hAfQbq16NN2_0Wlo,1642 +deepspeed/ops/csrc/includes/strided_batch_gemm.h,sha256=cJioBJNNFb00CApaAKWHossqnb13K4m9rKG6hEEZWxM,6807 +deepspeed/ops/csrc/includes/type_shim.h,sha256=8VuX9zK5EsmleO7wuAq9D1eHZC7zKWaf0N97eaGg7x8,6388 +deepspeed/ops/csrc/lamb/fused_lamb_cuda.cpp,sha256=icBfzI6QpN8LqIZT4-AY91UXlg_zDqkptNZy10erbws,3995 +deepspeed/ops/csrc/lamb/fused_lamb_cuda_kernel.cu,sha256=vjhQw1pj6v19tVE2otOCSStk53Ss0R82g9o1lXfJ5vw,15291 +deepspeed/ops/csrc/lion/cpu_lion.cpp,sha256=8BSc1jb7WaVOD1DMwLx-B-W4VQq53bth7s4nWieqvMU,535 +deepspeed/ops/csrc/lion/cpu_lion_impl.cpp,sha256=k2MeaGc9bABgXYkB3q2QPXbqURBWqWqK6n3Kv0WWkE4,9123 +deepspeed/ops/csrc/lion/fused_lion_frontend.cpp,sha256=RMiEOozd9gjXPidc4Ykg-kDIJek0OOASFVwmLIfFs0M,732 +deepspeed/ops/csrc/lion/multi_tensor_apply.cuh,sha256=tMXW3UwsOhNM4J1axQbNkn4gNfmig7_vXPiBLdFCCv4,5595 +deepspeed/ops/csrc/lion/multi_tensor_lion.cu,sha256=11RptKP9XmwMuvGBrmq3P1T8PAH9IvBJirB8gInMVoQ,4377 +deepspeed/ops/csrc/quantization/dequantize.cu,sha256=iibytiJox1QcqE8b57KCLPOaD610STv2VUmKoC7RVM8,3302 +deepspeed/ops/csrc/quantization/fake_quantizer.cu,sha256=CPMZnjOjJTKFibCBeYG1WrXnwEG-0l6POuZCRsBrHBo,37581 +deepspeed/ops/csrc/quantization/pt_binding.cpp,sha256=NGZfFHg3q4swpD4e2AZh5WPxlK159cnxZcOdOHf3cN0,12374 +deepspeed/ops/csrc/quantization/quant_reduce.cu,sha256=ona0QemL3ZGoiRlXvfkkP4oTpRoG_WvB81jX0pi8K2c,11311 +deepspeed/ops/csrc/quantization/quantize.cu,sha256=IzmI5sq3V8B3tF92i84wlouV186djjVvdISREcepYRc,6988 +deepspeed/ops/csrc/quantization/quantize_intX.cu,sha256=SIEWB2qHtFkHINoTTNukiJnXnQS06BKVH3Wjtg-im1k,8514 
+deepspeed/ops/csrc/quantization/swizzled_quantize.cu,sha256=XPcFFPzml6yWQudLhCbPlTBrx2Gef7HjySytUwJKvJs,8461 +deepspeed/ops/csrc/random_ltd/gather_scatter.cu,sha256=YFiLiYlgkq1RNYS8yRNI3CKYUrv_8ht0lsvZsUcsUYg,8408 +deepspeed/ops/csrc/random_ltd/pt_binding.cpp,sha256=7NAanMKS-j_hBputc6Ltv1yH6-c7xLwhDZJUfLiH--M,9590 +deepspeed/ops/csrc/random_ltd/slice_attn_masks.cu,sha256=KqANgsPTAAHyys0cPh3023JaBF8b0kOKF-W13W39Q_4,5076 +deepspeed/ops/csrc/random_ltd/token_sort.cu,sha256=yJTRkhVI-S_fT9ikw3VIFsEV6Gl0LxxtDI5oRls6dhU,7027 +deepspeed/ops/csrc/sparse_attention/utils.cpp,sha256=HmNSJfE5WzsE3dYSWjOKOAxawUOcgZ2JwQpJSjvB3b4,4523 +deepspeed/ops/csrc/spatial/csrc/opt_bias_add.cu,sha256=8N-FAKh-6Fwu7byz6e_sjXpd8XCRwdFntbZzZL6H8x8,6273 +deepspeed/ops/csrc/spatial/csrc/pt_binding.cpp,sha256=K2zwA7EXdtjM05P9Bl8YV_p35TQXrt4xrADEkq44Dlo,3863 +deepspeed/ops/csrc/spatial/includes/spatial_cuda_layers.h,sha256=dPh5gJbsCh2gB4_g9PSzMb1zN6LJvAGLfD7Vw_08_mc,915 +deepspeed/ops/csrc/transformer/cublas_wrappers.cu,sha256=Zt4FUGO24jjTQ5XvE7YV7H05pzOwe0smKPQIMmA8Y9g,17572 +deepspeed/ops/csrc/transformer/dropout_kernels.cu,sha256=u6U7RuMioRS_aixeRL0aGvU3r7exZ-EFeE7Etgg66IM,29835 +deepspeed/ops/csrc/transformer/ds_transformer_cuda.cpp,sha256=sztj1XsEX1rbeq2lovNbYf_jZ-ExLL9qB-TLVRP_1D4,47582 +deepspeed/ops/csrc/transformer/gelu_kernels.cu,sha256=f1emIwHWorXkuIqaB9kD93L4HjzSCcQ2JL7GlE6hh6s,12191 +deepspeed/ops/csrc/transformer/general_kernels.cu,sha256=TzoOK2TqstG0MX9swk0UptK2zBgkpYwo99PeY9j0Fns,14520 +deepspeed/ops/csrc/transformer/inference/csrc/apply_rotary_pos_emb.cu,sha256=jR_GJ9r9T90UAscJKFkklP8vzWKZ433gsJBsZUp5HB0,8558 +deepspeed/ops/csrc/transformer/inference/csrc/dequantize.cu,sha256=z7GzRO9XUJFb0DNPr7luWML9cJTYOZrR5wAQGeuStqY,4916 +deepspeed/ops/csrc/transformer/inference/csrc/gelu.cu,sha256=uB5npZROyc4mk1yjtipCo2OurQsZvcsfMP1ey9nfVuo,28649 +deepspeed/ops/csrc/transformer/inference/csrc/layer_norm.cu,sha256=79gLSVFW19gsgFQNezyITbBImha7_oXJo5NfBloOhzU,20883 
+deepspeed/ops/csrc/transformer/inference/csrc/pointwise_ops.cu,sha256=tJP7V172fEHn86xtO1H8lKzBS7CToDNofJBnOVhivEA,2476 +deepspeed/ops/csrc/transformer/inference/csrc/pt_binding.cpp,sha256=_C-hVbsnW55taSjiY5XacU_DCGq13kkLXWB94AqtIEM,87552 +deepspeed/ops/csrc/transformer/inference/csrc/relu.cu,sha256=F_W6D8eeHtm_btn-FTyAV8FnLTDJ9H8mpkDBWpmxxDc,2318 +deepspeed/ops/csrc/transformer/inference/csrc/rms_norm.cu,sha256=0vHKVYfQQ8w-qLUD-ihfCd0jWDQ_N03QuQu0rimHrdU,10251 +deepspeed/ops/csrc/transformer/inference/csrc/softmax.cu,sha256=GY3hRUkY59BcQqe2BA0QCGw3tlN2UO4GQipRezSzJLM,27245 +deepspeed/ops/csrc/transformer/inference/csrc/transform.cu,sha256=R0PAaY2hEyAq9_xMYDrsHRgzIXznZuI48jgU4tDtm_w,31659 +deepspeed/ops/csrc/transformer/inference/includes/inference_context.h,sha256=2-KQOUFuWdx1rOLR3rZtTmOpz9LUmY9Vs7a5qNybUzI,10531 +deepspeed/ops/csrc/transformer/inference/includes/inference_cublas_wrappers.h,sha256=K1U00bmeKbeLzmAlnBGoJegOZsAulJ1vlL90IEAEvOs,18517 +deepspeed/ops/csrc/transformer/inference/includes/inference_cuda_layers.h,sha256=TlXSYeyoKSWBaxuN_dDxrTMhutjQocPjKeGL_NPXIPo,9057 +deepspeed/ops/csrc/transformer/normalize_kernels.cu,sha256=u9fnXA77eqpoC1hri_w0IzC0xW7_OIG5Cs6F_UFnivs,74900 +deepspeed/ops/csrc/transformer/softmax_kernels.cu,sha256=GKmqpxpbAFlhM2p1SFMZ02phEpYMSGUFA2J9ukLovno,26758 +deepspeed/ops/csrc/transformer/transform_kernels.cu,sha256=Ob-Hae1q-KZmDxaji0Ci0nHyNuRYDvPFBF_yrnB8ZoE,22709 +deepspeed/ops/csrc/utils/flatten_unflatten.cpp,sha256=RdtrXrJ_BiE-p3nygS3JW_vhzBGCHc5_CL7KY7jY1e8,788 +deepspeed/ops/csrc/xpu/adagrad/cpu_adagrad.cpp,sha256=-LiTifCTt4Pr_4miMyP5-8W84FHhORWZn8mx-VZDR58,6814 +deepspeed/ops/csrc/xpu/adam/cpu_adam.cpp,sha256=bDny-11rq18NQoRXkW62zQYTnCCCoqiXPTot3TeK3fI,535 +deepspeed/ops/csrc/xpu/adam/cpu_adam_impl.cpp,sha256=WaviJDpZ1fwVsXNpAF3hmvtJ5eVSaPj1i816i6eNW24,8189 +deepspeed/ops/csrc/xpu/adam/fused_adam_frontend.cpp,sha256=tu6pBcAvqcdh1ti6SB7fj8HnqFDQg3Rqbaa4xsQ2v7M,880 
+deepspeed/ops/csrc/xpu/adam/multi_tensor_adam.dp.cpp,sha256=Du_BWrHEoYqoCfDb6qKxDeKXP69ZfdFEW1mt7ir5ah0,6595 +deepspeed/ops/csrc/xpu/common/custom_cuda_kernel.dp.cpp,sha256=fSy_UoyFgDlpKWEDRtUqwrhaLMMUV1OG7eCVZfzcGKs,3525 +deepspeed/ops/csrc/xpu/includes/compat.h,sha256=5ivuTerIqiCaYu2HpRPzJcUtqMsAtRB7aOXel69HeZQ,336 +deepspeed/ops/csrc/xpu/includes/cpu_adagrad.h,sha256=7yZNV7T38Yvnv2JLfcdm4KdBuXyPdvT-7Plzva_5BbQ,3951 +deepspeed/ops/csrc/xpu/includes/cpu_adam.h,sha256=3a0twZXzUUnAhrEymYF8gKFoVW594B-I-Snjg6paK9Q,7956 +deepspeed/ops/csrc/xpu/includes/simd.h,sha256=8wzoQAfIKLyUi-1V7o_kX3e1Hy3ye5QfH-XOz3dlDP8,6586 +deepspeed/ops/csrc/xpu/includes/type_shim.h,sha256=hu3RINbuYEW4G4tTdiC9KifB0KdYzQVQYmXO4Buntis,7779 +deepspeed/ops/deepspeed4science/__init__.py,sha256=LRmYuJYPcnqiyIuSCwsvqpD61wcScSmS7An8zq0UY4M,175 +deepspeed/ops/deepspeed4science/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/deepspeed4science/__pycache__/evoformer_attn.cpython-310.pyc,, +deepspeed/ops/deepspeed4science/evoformer_attn.py,sha256=50z8I1w4lJww3bRiBxq8PfEJxcacZw_qXk4DyTyWsSQ,4121 +deepspeed/ops/lamb/__init__.py,sha256=HG9WeYIi4tTtKZBuLuJNIVpLSK75djGxjXDkrgfreEk,130 +deepspeed/ops/lamb/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/lamb/__pycache__/fused_lamb.cpython-310.pyc,, +deepspeed/ops/lamb/fused_lamb.py,sha256=gZbjO38b517Ii1tEJwT0O81YwJ5bXtflM2uq57NFXtI,7815 +deepspeed/ops/lion/__init__.py,sha256=EFF2vXYA9hYPNuqBsLsd1MTFCPiOFfW1ZWBrlbxfbD8,169 +deepspeed/ops/lion/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/lion/__pycache__/cpu_lion.cpython-310.pyc,, +deepspeed/ops/lion/__pycache__/fused_lion.cpython-310.pyc,, +deepspeed/ops/lion/__pycache__/multi_tensor_apply.cpython-310.pyc,, +deepspeed/ops/lion/cpu_lion.py,sha256=XOTMpFLCJ7tLmwFIIb6dsayTLKhJZriHAkIlzeMjGgU,6201 +deepspeed/ops/lion/fused_lion.py,sha256=QoYzriRhYkaa_bDxIY_YGviCUe-BcNZvvNtFA4g5kLc,5548 +deepspeed/ops/lion/multi_tensor_apply.py,sha256=APt3UCnfw-nLjJOGWXC3izWRXNu-16TYhB-s213DpM0,429 
+deepspeed/ops/op_builder/__init__.py,sha256=HJ00U1SrhiXDftTIq9k5hZp-CuRPZ5GWOyQerqgC21U,2005 +deepspeed/ops/op_builder/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/all_ops.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/async_io.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/builder.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/cpu_adagrad.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/cpu_adam.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/cpu_lion.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/evoformer_attn.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/fused_adam.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/fused_lamb.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/fused_lion.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/inference_core_ops.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/inference_cutlass_builder.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/quantizer.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/ragged_ops.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/ragged_utils.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/random_ltd.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/sparse_attn.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/spatial_inference.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/stochastic_transformer.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/transformer.cpython-310.pyc,, +deepspeed/ops/op_builder/__pycache__/transformer_inference.cpython-310.pyc,, +deepspeed/ops/op_builder/all_ops.py,sha256=TwPAF3R6rufNGz2WAO7lrgW0REng1V6uWF1yZ2nrplY,1180 +deepspeed/ops/op_builder/async_io.py,sha256=w_fGQCKTz8qD_1Z8b8XW5IjnoUlhc79Ui_sBrBkaD48,3661 +deepspeed/ops/op_builder/builder.py,sha256=m5skdFvaH8LtC6Nfrbdn409a0mQSixoR1da5xv2HMt8,31147 +deepspeed/ops/op_builder/cpu/__init__.py,sha256=vUsb1NiBIkOKF_qFCy3hpbs725dnaLCvTN4sjwlniLU,295 
+deepspeed/ops/op_builder/cpu/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/op_builder/cpu/__pycache__/builder.cpython-310.pyc,, +deepspeed/ops/op_builder/cpu/__pycache__/comm.cpython-310.pyc,, +deepspeed/ops/op_builder/cpu/__pycache__/cpu_adam.cpython-310.pyc,, +deepspeed/ops/op_builder/cpu/__pycache__/fused_adam.cpython-310.pyc,, +deepspeed/ops/op_builder/cpu/__pycache__/no_impl.cpython-310.pyc,, +deepspeed/ops/op_builder/cpu/builder.py,sha256=8XZWnoY1-ldVsE2N_Pyk6N9airTygcKjh-axw3cEn6o,1315 +deepspeed/ops/op_builder/cpu/comm.py,sha256=b7sLY1dLVaJuAEdZWjkXDtOY_oUgcU0s_OzuOQ2WeDY,1285 +deepspeed/ops/op_builder/cpu/cpu_adam.py,sha256=rGtsjwhzzP9iMDIphGxjh5TLUBtUiEwtrnf5x5BbG08,625 +deepspeed/ops/op_builder/cpu/fused_adam.py,sha256=5LAQSEL2H4Bcc1b7LArqKCcF2Ztuq2uaABmRmI7m5Ss,546 +deepspeed/ops/op_builder/cpu/no_impl.py,sha256=OIZ3aVWXK9W8iRtgNL0pAtl033WV0hEoOuPP35UuFUc,616 +deepspeed/ops/op_builder/cpu_adagrad.py,sha256=OWfsh2f07IuRQdLgomSv7EevhAdqZ1TxDsEwifhrJDE,1151 +deepspeed/ops/op_builder/cpu_adam.py,sha256=QlPrfAEmGq5_iSZiRnF_GZxzN8I45CH5znSmlSN3qtk,1190 +deepspeed/ops/op_builder/cpu_lion.py,sha256=hEf4hIAxEzNN1beGGEF7BxrbzGLNb66TWVEAIUXRkts,1459 +deepspeed/ops/op_builder/evoformer_attn.py,sha256=Qpyf6UeP1FmSVdxMgDUEEI0Xcdj8UrhwAiTbX25zRbs,2728 +deepspeed/ops/op_builder/fused_adam.py,sha256=IC87eCjvRPJ2ETVdQUv0-d_yxMXhDJvbPML1AEU9cSQ,1044 +deepspeed/ops/op_builder/fused_lamb.py,sha256=Ojx9euSMUeeU7DVIFxE11w_fHERqYsZxNNtaGxnAAag,1216 +deepspeed/ops/op_builder/fused_lion.py,sha256=1ZJh92_chzH0Jupt8RO8GUOnrm6ACKLECZ5mTiV7WI0,1044 +deepspeed/ops/op_builder/hpu/__init__.py,sha256=fCgjMg2OsK3qA9Yg6waIA_YCcCzRYNIJiUyMCdrmuvQ,318 +deepspeed/ops/op_builder/hpu/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/op_builder/hpu/__pycache__/builder.cpython-310.pyc,, +deepspeed/ops/op_builder/hpu/__pycache__/cpu_adam.cpython-310.pyc,, +deepspeed/ops/op_builder/hpu/__pycache__/fused_adam.cpython-310.pyc,, 
+deepspeed/ops/op_builder/hpu/__pycache__/no_impl.cpython-310.pyc,, +deepspeed/ops/op_builder/hpu/builder.py,sha256=2Haj41Q599cfD3J4kAawyAI734knv1hXf4fu1ZLjTbQ,1371 +deepspeed/ops/op_builder/hpu/cpu_adam.py,sha256=vqGXAKkkORAcvFUWOvS0QCvL6EnIGvyfUrXdg-EulMc,798 +deepspeed/ops/op_builder/hpu/fused_adam.py,sha256=uv1k6BHKwET0NNl2FqI5AOr6BL1rN7cA6-qRKuFEyVE,719 +deepspeed/ops/op_builder/hpu/no_impl.py,sha256=cUJ87-4-I46CK3GmS-Ve52nzfMhcFayT_YZF5FWEmzY,616 +deepspeed/ops/op_builder/inference_core_ops.py,sha256=Kai4H6Hrc2Jy2DIzMzDVwAWElZINHlEfYasyuHPrjmQ,4163 +deepspeed/ops/op_builder/inference_cutlass_builder.py,sha256=IPAcnhCzztQR3t8Y-A9mWiW_wy7YgbfK5HAzs3DHoFg,3250 +deepspeed/ops/op_builder/npu/__init__.py,sha256=utWarSvvEuL_LUk8-i4Kx2oCHJRemddoGeGfzHOCsTE,419 +deepspeed/ops/op_builder/npu/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/op_builder/npu/__pycache__/async_io.cpython-310.pyc,, +deepspeed/ops/op_builder/npu/__pycache__/builder.cpython-310.pyc,, +deepspeed/ops/op_builder/npu/__pycache__/cpu_adagrad.cpython-310.pyc,, +deepspeed/ops/op_builder/npu/__pycache__/cpu_adam.cpython-310.pyc,, +deepspeed/ops/op_builder/npu/__pycache__/cpu_lion.cpython-310.pyc,, +deepspeed/ops/op_builder/npu/__pycache__/fused_adam.cpython-310.pyc,, +deepspeed/ops/op_builder/npu/__pycache__/inference.cpython-310.pyc,, +deepspeed/ops/op_builder/npu/__pycache__/no_impl.cpython-310.pyc,, +deepspeed/ops/op_builder/npu/async_io.py,sha256=lyBqA14EWTMRcuflHJuqun8KcFebD4u4RMRM1wGlm28,3814 +deepspeed/ops/op_builder/npu/builder.py,sha256=k21-MqhnSCYyMpuOxbRjDWbsIcnDnA6GxWPtsmtBuC8,3200 +deepspeed/ops/op_builder/npu/cpu_adagrad.py,sha256=CM4wGBKhOHayOyzvOGX2yDIh4PgOCu6AUHXF4GKq9kE,581 +deepspeed/ops/op_builder/npu/cpu_adam.py,sha256=S5V76jfADBzNNP-CyBoTXZSWrQMLYWuos6ACygoumDE,594 +deepspeed/ops/op_builder/npu/cpu_lion.py,sha256=rDkiA8vVCMolVFWUnvN9PkrLOJlc6P_rSRNYmOUrYV8,594 +deepspeed/ops/op_builder/npu/fused_adam.py,sha256=be-Lvt8CA8gf5oLtcg9BDqTupJ7lHT5O8PBfoNvkTns,2130 
+deepspeed/ops/op_builder/npu/inference.py,sha256=-IsUpVW106uskkegCrYzyMoed5W-RhhffawBWsXYad8,15187 +deepspeed/ops/op_builder/npu/no_impl.py,sha256=qZnbySBzMLLUrMlZV656Pdh8TGfEZKNACKdmD6uT9ho,755 +deepspeed/ops/op_builder/quantizer.py,sha256=k0kTBN9_9Ri_dyzhW_ENxYpUVhiFOeHaIdVY1W50ZPE,1037 +deepspeed/ops/op_builder/ragged_ops.py,sha256=sxNN18RvM7Lg6iz5uKlN4oHZHepPCnhhhBL1Bc3qr0g,4789 +deepspeed/ops/op_builder/ragged_utils.py,sha256=Dw_6CzGGmp3I5fR1LdOHUlTBX6SiaWgX-Oo3e-TEsso,2604 +deepspeed/ops/op_builder/random_ltd.py,sha256=zNBXnNjckT9dZqtj4pVYHcSUu4m2govven5qh5Qp9VU,879 +deepspeed/ops/op_builder/sparse_attn.py,sha256=bUJa7uBe6ArfQe4MubDF6-n05ChVzk-PLsFY5Rn3PK4,2994 +deepspeed/ops/op_builder/spatial_inference.py,sha256=qOtjRrjWLjzw0xu-lA0otlOwd_bdub76ZzvOUqyHQcY,1534 +deepspeed/ops/op_builder/stochastic_transformer.py,sha256=nQPCaLrtuHVtJxMWIQp_8IT1iQ2XS9nQ1BJVwwAeIPI,565 +deepspeed/ops/op_builder/transformer.py,sha256=wyrj-FZTKPyXzwKOGPYDyic3w5hdtXh4dWJS_wfRdCU,1094 +deepspeed/ops/op_builder/transformer_inference.py,sha256=kGNIBTWER2h4EdUkRmfpsX4bx--fTiHywOVEgoMcYZY,2745 +deepspeed/ops/op_builder/xpu/__init__.py,sha256=h2v1i-z4hyP8NGnyt0Fjjn_mX5p_iWC1euckaIoDcOo,254 +deepspeed/ops/op_builder/xpu/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/op_builder/xpu/__pycache__/async_io.cpython-310.pyc,, +deepspeed/ops/op_builder/xpu/__pycache__/builder.cpython-310.pyc,, +deepspeed/ops/op_builder/xpu/__pycache__/cpu_adagrad.cpython-310.pyc,, +deepspeed/ops/op_builder/xpu/__pycache__/cpu_adam.cpython-310.pyc,, +deepspeed/ops/op_builder/xpu/__pycache__/fused_adam.cpython-310.pyc,, +deepspeed/ops/op_builder/xpu/async_io.py,sha256=Qy2AMkfPvyLw6J5JlReBt05j1iLwdxjHSKdLUjVH_Qg,3566 +deepspeed/ops/op_builder/xpu/builder.py,sha256=zQOT2O5KxkIN-0NQeucBGdEHEGaJeXgMua8HQ8Fs_XU,5692 +deepspeed/ops/op_builder/xpu/cpu_adagrad.py,sha256=bi9U6d3UmHRYw7qkIcRNRYTuasPYoSe38ngF71ZM1RQ,576 +deepspeed/ops/op_builder/xpu/cpu_adam.py,sha256=wPrCTl-ubqi_JPwKBMnCupqh4cEYmQFUAckqqCnxSh4,744 
+deepspeed/ops/op_builder/xpu/fused_adam.py,sha256=yEvt078k2gqqqDjPG_D5I5Ntuz_aAqEjYgtCgjpgAjg,701 +deepspeed/ops/quantizer/__init__.py,sha256=5IdLoKmcCi6MuZNNTGADQs88dgcCUdN50WofVXDcvpI,132 +deepspeed/ops/quantizer/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/quantizer/__pycache__/quantizer.cpython-310.pyc,, +deepspeed/ops/quantizer/quantizer.py,sha256=z3dh0MRUklHnR74y662k9H31FOMU0buQOGE9uPLcUZA,1193 +deepspeed/ops/random_ltd/__init__.py,sha256=MC02456CIFtrgIYKClskYz4kDnQ9X7zRyyKNp110l58,191 +deepspeed/ops/random_ltd/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/random_ltd/__pycache__/dropping_utils.cpython-310.pyc,, +deepspeed/ops/random_ltd/dropping_utils.py,sha256=tHMRvk6WIVDoPR58HWc0OwUNzXhfn8VYd7Xw13X-r5o,4902 +deepspeed/ops/sparse_attention/__init__.py,sha256=qhwanCYxLO9eaH1cx52E4LNe_An8Nrja_8WyyoYk42I,467 +deepspeed/ops/sparse_attention/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/sparse_attention/__pycache__/bert_sparse_self_attention.cpython-310.pyc,, +deepspeed/ops/sparse_attention/__pycache__/matmul.cpython-310.pyc,, +deepspeed/ops/sparse_attention/__pycache__/softmax.cpython-310.pyc,, +deepspeed/ops/sparse_attention/__pycache__/sparse_attention_utils.cpython-310.pyc,, +deepspeed/ops/sparse_attention/__pycache__/sparse_self_attention.cpython-310.pyc,, +deepspeed/ops/sparse_attention/__pycache__/sparsity_config.cpython-310.pyc,, +deepspeed/ops/sparse_attention/bert_sparse_self_attention.py,sha256=YfQPUqCgd3XmgT7HVb4wXkdZRDBfHinxx0mwQssILww,3463 +deepspeed/ops/sparse_attention/matmul.py,sha256=RYTMwGqaYRUTR7DC60Ek7PNWUc5n9sKJNgcTpzmiZfo,32948 +deepspeed/ops/sparse_attention/softmax.py,sha256=B_q4n2TP29iPfMLTnkRqgHxJ8y5cBjsTixPOO7zrrMw,11322 +deepspeed/ops/sparse_attention/sparse_attention_utils.py,sha256=XlNgnI3CdG8Kur-Ac1O5qJVQRX_lgexTzXtfarMAHb4,12300 +deepspeed/ops/sparse_attention/sparse_self_attention.py,sha256=Mgs_4NaoBoHIghomqaqnKtjbAgYs1xWlZOlZAoSaMao,6746 
+deepspeed/ops/sparse_attention/sparsity_config.py,sha256=eUCYxt5QPHWXdot-Myu206H9Y5WV8HrqxvO7LPKHHKg,42463 +deepspeed/ops/sparse_attention/trsrc/__init__.py,sha256=2G0yT0H4-bH6beifS-DoGK8lQcfZ2aIqt7IFekVRxP0,1032 +deepspeed/ops/sparse_attention/trsrc/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/sparse_attention/trsrc/matmul.tr,sha256=nM6iQGzBXyKFzHqmCDjpr8xmykwEls4Te6PWVJg22Dg,6628 +deepspeed/ops/sparse_attention/trsrc/softmax_bwd.tr,sha256=C--cFHczPdwJA7wm0s66p9f_nkzDMatqu3-iBQoAlog,1923 +deepspeed/ops/sparse_attention/trsrc/softmax_fwd.tr,sha256=t4bvel9w4ilPsn24DQgzIqGNG08Pt4pfCBFua8rj9fc,4047 +deepspeed/ops/transformer/__init__.py,sha256=ADlxWPujgq5jT_XL8yuRtTg9jdQiBK-5ydzgnNwap-o,413 +deepspeed/ops/transformer/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/transformer/__pycache__/transformer.cpython-310.pyc,, +deepspeed/ops/transformer/inference/__init__.py,sha256=ENKx17yjSD2dIqD6kPq-zxY-Xae-O84-ZPGGNeFq064,315 +deepspeed/ops/transformer/inference/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/transformer/inference/__pycache__/bias_add.cpython-310.pyc,, +deepspeed/ops/transformer/inference/__pycache__/config.cpython-310.pyc,, +deepspeed/ops/transformer/inference/__pycache__/diffusers_2d_transformer.cpython-310.pyc,, +deepspeed/ops/transformer/inference/__pycache__/diffusers_attention.cpython-310.pyc,, +deepspeed/ops/transformer/inference/__pycache__/diffusers_transformer_block.cpython-310.pyc,, +deepspeed/ops/transformer/inference/__pycache__/ds_attention.cpython-310.pyc,, +deepspeed/ops/transformer/inference/__pycache__/ds_mlp.cpython-310.pyc,, +deepspeed/ops/transformer/inference/__pycache__/moe_inference.cpython-310.pyc,, +deepspeed/ops/transformer/inference/__pycache__/triton_ops.cpython-310.pyc,, +deepspeed/ops/transformer/inference/bias_add.py,sha256=x1gk_iN4pmFNsSt6jwtBXil-3no4yDhTMbi_6yrs7gY,876 +deepspeed/ops/transformer/inference/config.py,sha256=h6rjANYuBs58JnzsgQc5IJ4cqV9ZbqVWaeI8CeW3KIk,5987 
+deepspeed/ops/transformer/inference/diffusers_2d_transformer.py,sha256=JVSE9B60n6XHeg37zE26W3-jzKOhxiOtJkmkCiM1nDU,236 +deepspeed/ops/transformer/inference/diffusers_attention.py,sha256=PCWEGMJj4M74TGD-uRKUiTnk1Nl9gBRSNjRsP-2iaVU,9922 +deepspeed/ops/transformer/inference/diffusers_transformer_block.py,sha256=sbwG5Dw6I-ePwBCY84S2dQE2u83NgEpky3S6s-MRdwE,4857 +deepspeed/ops/transformer/inference/ds_attention.py,sha256=fDn36zFd_i_5J-9nOSZkvB6RvO3svhm23_X7_R4xWiw,14355 +deepspeed/ops/transformer/inference/ds_mlp.py,sha256=Lr5q6pk3MrAx8D36wXhTNDlV7MsFMkFFaHb7CiKzfzo,6294 +deepspeed/ops/transformer/inference/moe_inference.py,sha256=QJvMHchypsaiis3O0SJHLy1HdwHZZirZ7XY4ePwMl8g,18540 +deepspeed/ops/transformer/inference/op_binding/__init__.py,sha256=JhdbkMuKVwM62BEU4aI4OscG3GGkxFKsvnttFx9vc9k,382 +deepspeed/ops/transformer/inference/op_binding/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/transformer/inference/op_binding/__pycache__/base.cpython-310.pyc,, +deepspeed/ops/transformer/inference/op_binding/__pycache__/gelu_gemm.cpython-310.pyc,, +deepspeed/ops/transformer/inference/op_binding/__pycache__/linear.cpython-310.pyc,, +deepspeed/ops/transformer/inference/op_binding/__pycache__/mlp_gemm.cpython-310.pyc,, +deepspeed/ops/transformer/inference/op_binding/__pycache__/qkv_gemm.cpython-310.pyc,, +deepspeed/ops/transformer/inference/op_binding/__pycache__/residual_add.cpython-310.pyc,, +deepspeed/ops/transformer/inference/op_binding/__pycache__/softmax.cpython-310.pyc,, +deepspeed/ops/transformer/inference/op_binding/__pycache__/softmax_context.cpython-310.pyc,, +deepspeed/ops/transformer/inference/op_binding/__pycache__/vector_matmul.cpython-310.pyc,, +deepspeed/ops/transformer/inference/op_binding/base.py,sha256=YN1s4f-7BKDmvCyKkVW3DOaXz40Xxk8FrIeDU2BfmJA,536 +deepspeed/ops/transformer/inference/op_binding/gelu_gemm.py,sha256=gY_aA6ni_ckPawkM-PHPeS2dvqcH3Vvarv8ao5sv3ug,1867 
+deepspeed/ops/transformer/inference/op_binding/linear.py,sha256=vIYuAmj9CX0KERm9_IE7pdmf4XG4UPXkanBqqtlnhgc,2719 +deepspeed/ops/transformer/inference/op_binding/mlp_gemm.py,sha256=j8XYzjGoY0WL1bLxgksx9FQgDZencRfSIsnUK4Xngy8,4680 +deepspeed/ops/transformer/inference/op_binding/qkv_gemm.py,sha256=hCtzmUtJxloLpOVgMM005wRt6MN3YYLnId04urbKGDs,4425 +deepspeed/ops/transformer/inference/op_binding/residual_add.py,sha256=3_KAcHwhNCnPcH6wjds3tjzTB9pVGBdRCr8JI2z1wQM,2708 +deepspeed/ops/transformer/inference/op_binding/softmax.py,sha256=QO9JLpF7VurtMMX6wNDqvBtVekh8AqrwMI2S586Q3lg,2460 +deepspeed/ops/transformer/inference/op_binding/softmax_context.py,sha256=jCekTFEGhztLY7NjeavZmdQDefbixwuK1to86Opa7Vw,2238 +deepspeed/ops/transformer/inference/op_binding/vector_matmul.py,sha256=NxbnOLMAouYkGjJc0R6-PmUmGQ11r64ioZG1Do57YPU,2750 +deepspeed/ops/transformer/inference/triton/__init__.py,sha256=FVRc5cfjotwsFtcB7gkAm09lXFbNl01gaoet67x4B_k,350 +deepspeed/ops/transformer/inference/triton/__pycache__/__init__.cpython-310.pyc,, +deepspeed/ops/transformer/inference/triton/__pycache__/attention.cpython-310.pyc,, +deepspeed/ops/transformer/inference/triton/__pycache__/gelu.cpython-310.pyc,, +deepspeed/ops/transformer/inference/triton/__pycache__/layer_norm.cpython-310.pyc,, +deepspeed/ops/transformer/inference/triton/__pycache__/matmul_ext.cpython-310.pyc,, +deepspeed/ops/transformer/inference/triton/__pycache__/mlp.cpython-310.pyc,, +deepspeed/ops/transformer/inference/triton/__pycache__/ops.cpython-310.pyc,, +deepspeed/ops/transformer/inference/triton/__pycache__/residual_add.cpython-310.pyc,, +deepspeed/ops/transformer/inference/triton/__pycache__/softmax.cpython-310.pyc,, +deepspeed/ops/transformer/inference/triton/__pycache__/triton_matmul_kernel.cpython-310.pyc,, +deepspeed/ops/transformer/inference/triton/attention.py,sha256=fZ8lN_UuLntv1fsSGItfRwIWKbemJeAAY8tDAkvVgo0,15922 +deepspeed/ops/transformer/inference/triton/gelu.py,sha256=OAViD-qU6B50KhjQgg-Y3leuEKP6gW2OkX7MX_VGDko,1152 
+deepspeed/ops/transformer/inference/triton/layer_norm.py,sha256=q5Xt2ov4z5DonYRBnWevaUYTFwJU5qIrJc6RVDtLaPw,7512 +deepspeed/ops/transformer/inference/triton/matmul_ext.py,sha256=DXxGNIVX3dJqIIkN4nxgw34LuD0gyp2-0Iko4qAKHHM,14622 +deepspeed/ops/transformer/inference/triton/mlp.py,sha256=c4yRQv3GPzEKcaxMFwkms4q2eF47ewemkYcnuhu0iec,4225 +deepspeed/ops/transformer/inference/triton/ops.py,sha256=KXcto03GshM-0UcYtRQK7Yo8LR68Um3LX1zpVoPvkw0,4125 +deepspeed/ops/transformer/inference/triton/residual_add.py,sha256=PYcAlDC8yz00LzJ8eXsJKoMmxjD_nMM0X5PbK8w_J70,3118 +deepspeed/ops/transformer/inference/triton/softmax.py,sha256=2q_qNmffFM4l2UgsAFKrJmAVrID95KK1rcg5dEzBmco,3208 +deepspeed/ops/transformer/inference/triton/triton_matmul_kernel.py,sha256=8pa5U6ZJphYxJ3oX2cPdxdYilmuKf8w_lvt1A8-pjbg,12570 +deepspeed/ops/transformer/inference/triton_ops.py,sha256=lR82VswoMZ9b2Axbck_eF5GCOczUmvKTDAT1W8bVH1w,5487 +deepspeed/ops/transformer/transformer.py,sha256=1UMs9JNjlK3mD5PJi3iER9QARUO2_a5h0sZAY0Rmo24,20600 +deepspeed/pipe/__init__.py,sha256=ddCnO6IbTpGzlAIgfEbGg6RPCz70HNN-MwTjCpTsqLI,164 +deepspeed/pipe/__pycache__/__init__.cpython-310.pyc,, +deepspeed/profiling/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/profiling/__pycache__/__init__.cpython-310.pyc,, +deepspeed/profiling/__pycache__/config.cpython-310.pyc,, +deepspeed/profiling/__pycache__/constants.cpython-310.pyc,, +deepspeed/profiling/config.py,sha256=OzJuTtxVvm62T1iA5eXocvP8xQQXHNtzcCdqISLTTKM,1959 +deepspeed/profiling/constants.py,sha256=QaZFzozkotv71aArhiwtQJ13DNDGDU1xC0Jm0KrVH08,1243 +deepspeed/profiling/flops_profiler/__init__.py,sha256=NCZv_Ktz4sFmmfpBwGKVVbbKrI36VQHwPapvLlWUUxE,120 +deepspeed/profiling/flops_profiler/__pycache__/__init__.cpython-310.pyc,, +deepspeed/profiling/flops_profiler/__pycache__/profiler.cpython-310.pyc,, +deepspeed/profiling/flops_profiler/profiler.py,sha256=wnOWearI4j6ormmB4QcpCfwED7w0xgl8atMYuDo637s,50350 
+deepspeed/pydantic_v1.py,sha256=p6m4eFg-1jIrEkzbpehZ-DFy2b_JvtZbuORSTnPWPYQ,547 +deepspeed/runtime/__init__.py,sha256=YMiorSUmE3jGv-2-shdt4QRfQmcI6KkAq8dfY33L_74,192 +deepspeed/runtime/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/__pycache__/bf16_optimizer.cpython-310.pyc,, +deepspeed/runtime/__pycache__/compiler.cpython-310.pyc,, +deepspeed/runtime/__pycache__/config.cpython-310.pyc,, +deepspeed/runtime/__pycache__/config_utils.cpython-310.pyc,, +deepspeed/runtime/__pycache__/constants.cpython-310.pyc,, +deepspeed/runtime/__pycache__/dataloader.cpython-310.pyc,, +deepspeed/runtime/__pycache__/eigenvalue.cpython-310.pyc,, +deepspeed/runtime/__pycache__/engine.cpython-310.pyc,, +deepspeed/runtime/__pycache__/hybrid_engine.cpython-310.pyc,, +deepspeed/runtime/__pycache__/lr_schedules.cpython-310.pyc,, +deepspeed/runtime/__pycache__/progressive_layer_drop.cpython-310.pyc,, +deepspeed/runtime/__pycache__/quantize.cpython-310.pyc,, +deepspeed/runtime/__pycache__/sparse_tensor.cpython-310.pyc,, +deepspeed/runtime/__pycache__/state_dict_factory.cpython-310.pyc,, +deepspeed/runtime/__pycache__/utils.cpython-310.pyc,, +deepspeed/runtime/__pycache__/weight_quantizer.cpython-310.pyc,, +deepspeed/runtime/activation_checkpointing/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/runtime/activation_checkpointing/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/activation_checkpointing/__pycache__/checkpointing.cpython-310.pyc,, +deepspeed/runtime/activation_checkpointing/__pycache__/config.cpython-310.pyc,, +deepspeed/runtime/activation_checkpointing/checkpointing.py,sha256=lsGBDGwGbHCTNGKtF1o-qY5Ou10u5XBrUBioEcsyD8I,45235 +deepspeed/runtime/activation_checkpointing/config.py,sha256=YQkhHYT6Qr2GJl_KusLU0pUJ-rbLLKKj0Zo7LkeZkAg,3988 +deepspeed/runtime/bf16_optimizer.py,sha256=MCjTBlAQIdWhVPlSfzcRw_0fzgljY3D9id6xKRJbkoI,21913 +deepspeed/runtime/checkpoint_engine/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 
+deepspeed/runtime/checkpoint_engine/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/checkpoint_engine/__pycache__/checkpoint_engine.cpython-310.pyc,, +deepspeed/runtime/checkpoint_engine/__pycache__/nebula_checkpoint_engine.cpython-310.pyc,, +deepspeed/runtime/checkpoint_engine/__pycache__/torch_checkpoint_engine.cpython-310.pyc,, +deepspeed/runtime/checkpoint_engine/checkpoint_engine.py,sha256=rPBAEEhDvuvoB1FRmBT5LdG95-lx4KdVd83wwxC1nc4,653 +deepspeed/runtime/checkpoint_engine/nebula_checkpoint_engine.py,sha256=uDA0iiprvyJZszXG98MV4RM48aWUHAnhLgRVFuxOp8s,4975 +deepspeed/runtime/checkpoint_engine/torch_checkpoint_engine.py,sha256=amrlwu4rI_RLGjYBtw11tasdUWQsgbSeURU4QTkmW2Q,1060 +deepspeed/runtime/comm/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/runtime/comm/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/comm/__pycache__/coalesced_collectives.cpython-310.pyc,, +deepspeed/runtime/comm/__pycache__/hccl.cpython-310.pyc,, +deepspeed/runtime/comm/__pycache__/mpi.cpython-310.pyc,, +deepspeed/runtime/comm/__pycache__/nccl.cpython-310.pyc,, +deepspeed/runtime/comm/coalesced_collectives.py,sha256=1E5b3LRMvfZOOY6LDw2vVwEaZrgCXhxKsEnWCESNcuY,7004 +deepspeed/runtime/comm/hccl.py,sha256=hAkDzjlwVwCu9QNN6mLoKMGNHklfvErUKTRIOZ4P6BY,5169 +deepspeed/runtime/comm/mpi.py,sha256=m3-XVeEL6F5ln5krGaExKcBj1Qlxg7y_XwvvVo8APcU,10076 +deepspeed/runtime/comm/nccl.py,sha256=ATT1x4uL5YOHU_E4v67zxoQAtqTxZgiVHhmEyoi4zVw,7586 +deepspeed/runtime/compiler.py,sha256=--Jrt_ZCYBeZah2kb1pJoj8Dz6YOXDL6T0UHrPYUXec,5510 +deepspeed/runtime/compression/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/runtime/compression/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/compression/__pycache__/cupy.cpython-310.pyc,, +deepspeed/runtime/compression/cupy.py,sha256=5k1odBHWg9awK2tY1jsLmUDC22VE8hucw_MV0YgnABs,701 +deepspeed/runtime/config.py,sha256=Cortdc_XTpHJa28NpYoX3H0y_QOKb37KlJU73_Xsb94,41583 
+deepspeed/runtime/config_utils.py,sha256=UNoTCKdYWkIozMFxydzOSLbvwPPto09B_cL37TSvrV4,8212 +deepspeed/runtime/constants.py,sha256=rQ3-MjNZKBZ_Ym1GG0GOF0koX9BaxuvJFrpDzw36lAc,14373 +deepspeed/runtime/data_pipeline/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/runtime/data_pipeline/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/__pycache__/config.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/__pycache__/constants.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/__pycache__/curriculum_scheduler.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/config.py,sha256=bqUnATEvLWDh7XfQ2RU38QolO6OKgLqLWfoNl9aQoGc,6081 +deepspeed/runtime/data_pipeline/constants.py,sha256=iigt5xCqvkuIccraNU4y22wipAGdviEMRZEI_imVa_o,4701 +deepspeed/runtime/data_pipeline/curriculum_scheduler.py,sha256=g6Heo7I5L8mpyKJXSH8t1SwO2Lxk9jMfOIMbYDksV00,10025 +deepspeed/runtime/data_pipeline/data_routing/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/runtime/data_pipeline/data_routing/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/data_routing/__pycache__/basic_layer.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/data_routing/__pycache__/helper.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/data_routing/__pycache__/scheduler.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/data_routing/__pycache__/utils.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/data_routing/basic_layer.py,sha256=QvJuYJRbFTZSkKovN5Z7PVQP5o_fNC-2QgYqrWGy8ns,5638 +deepspeed/runtime/data_pipeline/data_routing/helper.py,sha256=mFWBiepSdAAejDPSaIV6sXeKpRlfXoWFGF0CUmiDACY,1282 +deepspeed/runtime/data_pipeline/data_routing/scheduler.py,sha256=8ddlbZJ3RO0btPKmurh3288Lk1CIoMxRWHcrxlVMZkY,4638 +deepspeed/runtime/data_pipeline/data_routing/utils.py,sha256=ZGrHeImPXbVhLwOMQGWowXEK5YpIGXfC8i3RHVF4NAI,955 +deepspeed/runtime/data_pipeline/data_sampling/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 
+deepspeed/runtime/data_pipeline/data_sampling/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/data_sampling/__pycache__/data_analyzer.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/data_sampling/__pycache__/data_sampler.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/data_sampling/__pycache__/indexed_dataset.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/data_sampling/__pycache__/utils.cpython-310.pyc,, +deepspeed/runtime/data_pipeline/data_sampling/data_analyzer.py,sha256=B_XrbLKnyB9ii5euOgpSPlaHxVmcIFQZ6y9bewT905o,45637 +deepspeed/runtime/data_pipeline/data_sampling/data_sampler.py,sha256=PJNw94Zc0L31DRk4DH9vkXXGvXPMqtisnbF79gFY7UY,19894 +deepspeed/runtime/data_pipeline/data_sampling/indexed_dataset.py,sha256=9YWdU65pFBtSnXo0yIPac8UQs0-_AFrGhXJokCjoZDo,21469 +deepspeed/runtime/data_pipeline/data_sampling/utils.py,sha256=xUeLMBN-M5dU19Gt4cdJjrWivZB-Tyin9HCqu2mWlW8,1756 +deepspeed/runtime/dataloader.py,sha256=rNiiaHzYOU3hp8OAiC2g9tFWPwACJTMhJ_EVrxvP5OU,6977 +deepspeed/runtime/eigenvalue.py,sha256=Zk0AjdzV_v43BHceDkY4h35WyUvmuRgS-j-SzlIQ3r8,5625 +deepspeed/runtime/engine.py,sha256=cAVYFlRplqG93NHiLGBsb_vzJNG46bGrx3-nV8kF72w,169276 +deepspeed/runtime/fp16/__init__.py,sha256=Dzme9x1YQb-Ru4gzsCNsv8r19zkGyqwPwPwPgZzief4,140 +deepspeed/runtime/fp16/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/fp16/__pycache__/fused_optimizer.cpython-310.pyc,, +deepspeed/runtime/fp16/__pycache__/loss_scaler.cpython-310.pyc,, +deepspeed/runtime/fp16/__pycache__/unfused_optimizer.cpython-310.pyc,, +deepspeed/runtime/fp16/fused_optimizer.py,sha256=RweCxkLv8jU_3rXRa66vy2K7y70koHRCHGUXaiGy2xA,19549 +deepspeed/runtime/fp16/loss_scaler.py,sha256=3SjPPKSeRImsKZ-fIQWqoj3Wl7hK3Nz2JnGLGnQaRRI,11492 +deepspeed/runtime/fp16/onebit/__init__.py,sha256=8hnDOpk71O_EekeWgWIJ4CnbtYt67dre4flu5hoES-4,186 +deepspeed/runtime/fp16/onebit/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/fp16/onebit/__pycache__/adam.cpython-310.pyc,, 
+deepspeed/runtime/fp16/onebit/__pycache__/lamb.cpython-310.pyc,, +deepspeed/runtime/fp16/onebit/__pycache__/zoadam.cpython-310.pyc,, +deepspeed/runtime/fp16/onebit/adam.py,sha256=5KDqSD2fbKVyg3KOcyk-khBmmJuPkUs0ttSxuz0TMTY,15394 +deepspeed/runtime/fp16/onebit/lamb.py,sha256=RtvS9tidjiIMexZBPCO9BeDXEBbTixgfp5OohmvlXV4,23249 +deepspeed/runtime/fp16/onebit/zoadam.py,sha256=YTHNdUjcv_vIkTaP_N1C4fKjlDs3opRDXfTT9BQRFzg,19248 +deepspeed/runtime/fp16/unfused_optimizer.py,sha256=wbokG1MLhG_i_tmFW3TSyqu5__NR1NJdcpjnRI45eTU,17981 +deepspeed/runtime/hybrid_engine.py,sha256=GbXn3APoVhS8rcHvShYNsiA5Rq3ypPF32pIqFVZYDwc,20663 +deepspeed/runtime/lr_schedules.py,sha256=tTm4L_qnlbgiqwZmU9PjCeXED9j-I0XLY_5HlqgjBa4,38923 +deepspeed/runtime/pipe/__init__.py,sha256=4Xc534VEOCSKYVc-ImMFLx5eTmPB_URVgWqweehFsVI,195 +deepspeed/runtime/pipe/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/pipe/__pycache__/engine.cpython-310.pyc,, +deepspeed/runtime/pipe/__pycache__/module.cpython-310.pyc,, +deepspeed/runtime/pipe/__pycache__/p2p.cpython-310.pyc,, +deepspeed/runtime/pipe/__pycache__/schedule.cpython-310.pyc,, +deepspeed/runtime/pipe/__pycache__/topology.cpython-310.pyc,, +deepspeed/runtime/pipe/engine.py,sha256=qChX9d8QkwDaC5xvPzZWeqUeA1QQPwBbCC9zqxDNboM,60851 +deepspeed/runtime/pipe/module.py,sha256=F2z3xT-vjxcMoAOlo1YJ3K7chja6dk0UfWlKRY6B1bs,27590 +deepspeed/runtime/pipe/p2p.py,sha256=213FGhearzDC5TxWHoz0F853FBrWzm50357YXaVA-jo,5477 +deepspeed/runtime/pipe/schedule.py,sha256=ZDzAeTPZTaYt77Wi2KSkycfuwneBGFsa7V6afQAzHo4,15546 +deepspeed/runtime/pipe/topology.py,sha256=SLrWqLHSANPy4LnRS-RzvzhV45l_G0HhG_NGq7sQ6ws,17167 +deepspeed/runtime/progressive_layer_drop.py,sha256=5zb3-BrMbRxYZL5lk1FtvaOknMi31xT1refDkgSMQbQ,1353 +deepspeed/runtime/quantize.py,sha256=OmimrTJV_RoVG8TLVdjcQR91kSatQCNdFkPf3U39fsk,7699 +deepspeed/runtime/sparse_tensor.py,sha256=QBoplFkxSYLqE9Jfj8mN2qNLMMpVpEgpP3nuIsobpRg,2466 
+deepspeed/runtime/state_dict_factory.py,sha256=Zp2mJbIySbYgp2cl5pFvLkpROOOqX9eqGnG1b3d0kD0,18177 +deepspeed/runtime/swap_tensor/__init__.py,sha256=4I9UpQ5vMRU5SYSF_dW9FJDEnBq4m_0SuwtVQ92lGaA,95 +deepspeed/runtime/swap_tensor/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/swap_tensor/__pycache__/aio_config.cpython-310.pyc,, +deepspeed/runtime/swap_tensor/__pycache__/async_swapper.cpython-310.pyc,, +deepspeed/runtime/swap_tensor/__pycache__/constants.cpython-310.pyc,, +deepspeed/runtime/swap_tensor/__pycache__/optimizer_utils.cpython-310.pyc,, +deepspeed/runtime/swap_tensor/__pycache__/partitioned_optimizer_swapper.cpython-310.pyc,, +deepspeed/runtime/swap_tensor/__pycache__/partitioned_param_swapper.cpython-310.pyc,, +deepspeed/runtime/swap_tensor/__pycache__/pipelined_optimizer_swapper.cpython-310.pyc,, +deepspeed/runtime/swap_tensor/__pycache__/utils.cpython-310.pyc,, +deepspeed/runtime/swap_tensor/aio_config.py,sha256=wqpT7zvdGuXq-FNPdoBsT-OdHfZ2R30VFRGD1vaePeQ,1172 +deepspeed/runtime/swap_tensor/async_swapper.py,sha256=RlwlqBbkHa3nviVT4nrYzZ4s05nPch_dNMHTZcFOdxw,6349 +deepspeed/runtime/swap_tensor/constants.py,sha256=5vyjU9aSe4vzmi7GtDwuayUOSYAfstUfh5-1uZEV17k,596 +deepspeed/runtime/swap_tensor/optimizer_utils.py,sha256=1lnRZh_pEq_GbaTeATuIppDRzcY0XsIRp5E2dkSjZqY,19418 +deepspeed/runtime/swap_tensor/partitioned_optimizer_swapper.py,sha256=vJzlDsvQQ4mD4lNI6unVB-IZGkt0ZmAuNVS0Dum4kyI,9792 +deepspeed/runtime/swap_tensor/partitioned_param_swapper.py,sha256=0tLdMoMYEszj1Dhdfb_o5ePDrgjlCM9WrgIR99HO5ZU,18168 +deepspeed/runtime/swap_tensor/pipelined_optimizer_swapper.py,sha256=CpW6WvgdlokpB3q4Joictot1gy1YoY0Rzw5H4lSVXG8,10820 +deepspeed/runtime/swap_tensor/utils.py,sha256=7ojbp5Y62PpyV1HxyPqzWwqjo-B9Rj4WDAi58OZw0ug,7778 +deepspeed/runtime/utils.py,sha256=ZVxTgOw7AbDFFO2ThaZbwx8Vb_I9lyzhKEbS-MYUJs8,39426 +deepspeed/runtime/weight_quantizer.py,sha256=-aS6hTZnMcc3ckoYaCI6MiHybwy2NZo5AqEZ8rT7G_o,7027 
+deepspeed/runtime/zero/__init__.py,sha256=w3MBhv07e9ZvPptWeZdHp2L7E4dehAenVwSxkJ9TPy0,452 +deepspeed/runtime/zero/__pycache__/__init__.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/config.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/contiguous_memory_allocator.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/linear.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/mics.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/mics_utils.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/offload_config.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/parameter_offload.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/partition_parameters.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/partitioned_param_coordinator.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/partitioned_param_profiler.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/stage3.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/stage_1_and_2.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/test.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/tiling.cpython-310.pyc,, +deepspeed/runtime/zero/__pycache__/utils.cpython-310.pyc,, +deepspeed/runtime/zero/config.py,sha256=kllEgYp1AYY-I92nGydGyD6sxghn-36XZDyJY1seoyY,12217 +deepspeed/runtime/zero/contiguous_memory_allocator.py,sha256=r1J-KRJqO_Aa4Rqlsb1cgtHPa4mpnxm_zrd19FzZVQU,10923 +deepspeed/runtime/zero/linear.py,sha256=1AaaHodON4Zp7PZ6XzP14w9efy3j4-oA0LcEdwZ5UvY,7522 +deepspeed/runtime/zero/mics.py,sha256=VBkqj9vOG08ofM4gkFdduMgvt-Mdez58W9DEUJCHXeo,22061 +deepspeed/runtime/zero/mics_utils.py,sha256=GWmBVESUd699i4YjCKScOwAwlwwFGs0si1dmPAeMz60,7500 +deepspeed/runtime/zero/offload_config.py,sha256=yBF288VPaHcWK2xAypWlX64BCZjeaGMz7UEPZjTE8IY,3086 +deepspeed/runtime/zero/parameter_offload.py,sha256=91D5D9Bk189hPPKAM65nix9hcTVZjCyr_zqyz-GtDTs,22081 +deepspeed/runtime/zero/partition_parameters.py,sha256=O-Pr2gMR-cb26gp-cviML9Hmu_T39D4HjUbW8NXBWbs,102267 
+deepspeed/runtime/zero/partitioned_param_coordinator.py,sha256=GCcZ090pGiwnfznvTlPfl4rBb8VJt-velUJoNYUjT6A,27420 +deepspeed/runtime/zero/partitioned_param_profiler.py,sha256=xTXgTnu1ew215TluuO2HvP78hZNbqPEhzJBieeOChf8,1801 +deepspeed/runtime/zero/stage3.py,sha256=TJmpCtHvtFq5Eyd6fmmRzwz7_vhlStZy6rrTFwXq6B0,132437 +deepspeed/runtime/zero/stage_1_and_2.py,sha256=94TbOhSgwtw6sglaB2ch22dnEHl5wz5_LsPUUr1tP44,121576 +deepspeed/runtime/zero/test.py,sha256=j7zu8Fnxm5_Wmp-aaUiQicPD1EUL2JGpODeSARFSZVs,2727 +deepspeed/runtime/zero/tiling.py,sha256=Uz6IgvJPAQ4GICXj4E509p_BIDvnczWVtBT7EjZU9rg,11727 +deepspeed/runtime/zero/utils.py,sha256=B_tddnZmjPofPv5QjWnZM8vIeqNSmID66L2VI3m4EIE,5399 +deepspeed/sequence/__init__.py,sha256=4I9UpQ5vMRU5SYSF_dW9FJDEnBq4m_0SuwtVQ92lGaA,95 +deepspeed/sequence/__pycache__/__init__.cpython-310.pyc,, +deepspeed/sequence/__pycache__/layer.cpython-310.pyc,, +deepspeed/sequence/layer.py,sha256=3dbXJ2BuCtHNKEf-zfiyvuJp7tmw-1Iaopbl5w15_Go,3791 +deepspeed/utils/__init__.py,sha256=DzbjPTrKTvrp0GMzTBzejuzXX8GHsPkqi5CdrAw3V-I,1242 +deepspeed/utils/__pycache__/__init__.cpython-310.pyc,, +deepspeed/utils/__pycache__/comms_logging.cpython-310.pyc,, +deepspeed/utils/__pycache__/debug.cpython-310.pyc,, +deepspeed/utils/__pycache__/exceptions.cpython-310.pyc,, +deepspeed/utils/__pycache__/groups.cpython-310.pyc,, +deepspeed/utils/__pycache__/init_on_device.cpython-310.pyc,, +deepspeed/utils/__pycache__/logging.cpython-310.pyc,, +deepspeed/utils/__pycache__/mixed_precision_linkage.cpython-310.pyc,, +deepspeed/utils/__pycache__/numa.cpython-310.pyc,, +deepspeed/utils/__pycache__/nvtx.cpython-310.pyc,, +deepspeed/utils/__pycache__/tensor_fragment.cpython-310.pyc,, +deepspeed/utils/__pycache__/timer.cpython-310.pyc,, +deepspeed/utils/__pycache__/types.cpython-310.pyc,, +deepspeed/utils/__pycache__/z3_leaf_module.cpython-310.pyc,, +deepspeed/utils/__pycache__/zero_to_fp32.cpython-310.pyc,, 
+deepspeed/utils/comms_logging.py,sha256=NGCs6SHN9msKDBP4MBlXOspajebMMXDzmZhpZ5Ff1XE,7846 +deepspeed/utils/debug.py,sha256=1GS4Yww4u-lNxIT1XxptUPsK_ND4DGBnTVURyBKvqxw,4504 +deepspeed/utils/exceptions.py,sha256=h4J_9uk3HmKG8LdiSO8DKYzOmIYrD1MGwtZD1nA-Q3g,144 +deepspeed/utils/groups.py,sha256=G5HXHX-eWYqESm329Vyc5pisgHknHvNiTUxH5DcOPgY,23187 +deepspeed/utils/init_on_device.py,sha256=Q4RFeRMi7PYGBHfTKh3E63X8IZquheDWYKbT56aOuuQ,3004 +deepspeed/utils/logging.py,sha256=Qnc75V0u249sIgFZ3972cr67vVxhJuCQi00Upu0_bFU,4375 +deepspeed/utils/mixed_precision_linkage.py,sha256=1dX8RSE1fHWwiY0gK7_pJHEYFiJHoJcN4K3W-i0eQZ8,2385 +deepspeed/utils/numa.py,sha256=SRRwiHFlvZ7HZ4EfPxMUOh4F9aCovmgIxDSggfqmYe8,7170 +deepspeed/utils/nvtx.py,sha256=rjO0SfexpacB1rAN9BiGtw-Xvj9WK74jA-9UZhBJ2M4,499 +deepspeed/utils/tensor_fragment.py,sha256=OZcWrCiYPPjlypJTpmj0S7WxRn3qhhVlmakSh_58I3M,11941 +deepspeed/utils/timer.py,sha256=keS1ZsnipWMdMK5sK8-1rzQLSzll14XMiNdrFR1174w,10404 +deepspeed/utils/types.py,sha256=IcEwFod7RqLTRCGf7lr2sbKhFRf1an1KvtQ98nswwNw,434 +deepspeed/utils/z3_leaf_module.py,sha256=qexuDAkm_-pcsONXdu7EpTjvtH3QzsbeB3L5WiNMtqk,4344 +deepspeed/utils/zero_to_fp32.py,sha256=R-IsPi6cVXBf9yEC_9EUa2B-gIgPIBHO1byWnRHqewU,25314 diff --git a/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/REQUESTED b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/WHEEL b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..505164bc02d63fe6b0b3299f849a77c5f1beeb41 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.8.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git 
a/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/entry_points.txt b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..cfa0d2ef62101b531baf7ac75575e64b2c0cb6a2 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[pytest_randomly.random_seeder] +deepspeed = deepspeed.runtime.utils:set_random_seed diff --git a/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/top_level.txt b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..0f2c819def28c8c4109156014fde317020d4ddb9 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/deepspeed-0.14.0.dist-info/top_level.txt @@ -0,0 +1 @@ +deepspeed diff --git a/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/INSTALLER b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/LICENSE b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..272afdf8f32f26f6bc221715fcefb4f8d7e31bc9 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Phil Wang + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 
+copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/METADATA b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..542e8dad43729fb8c0fbd2643e54548f0c9b8fd6 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/METADATA @@ -0,0 +1,18 @@ +Metadata-Version: 2.1 +Name: einops-exts +Version: 0.0.4 +Summary: Einops Extensions +Home-page: https://github.com/lucidrains/einops-exts +Author: Phil Wang +Author-email: lucidrains@gmail.com +License: MIT +Keywords: artificial intelligence,deep learning,tensor manipulation +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3.6 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: einops (>=0.4) + diff --git a/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/RECORD b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/RECORD new file mode 100644 index 
0000000000000000000000000000000000000000..a969f980cd9cd72ec72643210f2a120df0456f33 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/RECORD @@ -0,0 +1,13 @@ +einops_exts-0.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +einops_exts-0.0.4.dist-info/LICENSE,sha256=xZDkKtpHE2TPCAeqKe1fjdpKernl1YW-d01j_1ltkAU,1066 +einops_exts-0.0.4.dist-info/METADATA,sha256=yIU5EfeQdzzh8Dc-Feg8_a6p4LVHj8J1OrDFWHbvOdw,621 +einops_exts-0.0.4.dist-info/RECORD,, +einops_exts-0.0.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +einops_exts-0.0.4.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +einops_exts-0.0.4.dist-info/top_level.txt,sha256=SckcduaUqHpfn7q_H49iPlKEPbKXTwKcCczc806qzes,12 +einops_exts/__init__.py,sha256=FT0AocRvAC7bgRVinoglTY4uNjWZwfSfu9xZYHEwV4k,232 +einops_exts/__pycache__/__init__.cpython-310.pyc,, +einops_exts/__pycache__/einops_exts.cpython-310.pyc,, +einops_exts/__pycache__/torch.cpython-310.pyc,, +einops_exts/einops_exts.py,sha256=HHeQbJgZcuA_04R9NpRRlBFASCb9xBNtgwlmsABcU7U,2131 +einops_exts/torch.py,sha256=A0orev4xcv41qp3EmXiDjWFcXclNCs9kHkadqYiOpv8,1045 diff --git a/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/REQUESTED b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/WHEEL b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..57e3d840d59a650ac5bccbad5baeec47d155f0ad --- /dev/null +++ b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git 
a/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/top_level.txt b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..c5d7c901a10fd3f1f3be1c44349fb5844a3372c1 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/einops_exts-0.0.4.dist-info/top_level.txt @@ -0,0 +1 @@ +einops_exts diff --git a/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/LICENSE b/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..cc633905d333c4b42c1a0c8b34e9f734adeb6e1e --- /dev/null +++ b/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Just van Rossum + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/METADATA b/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..419722ab407d54ada769644e753e9256c68dabf3 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/METADATA @@ -0,0 +1,3343 @@ +Metadata-Version: 2.1 +Name: fonttools +Version: 4.54.1 +Summary: Tools to manipulate font files +Home-page: http://github.com/fonttools/fonttools +Author: Just van Rossum +Author-email: just@letterror.com +Maintainer: Behdad Esfahbod +Maintainer-email: behdad@behdad.org +License: MIT +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Environment :: Other Environment +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: End Users/Desktop +Classifier: License :: OSI Approved :: MIT License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Text Processing :: Fonts +Classifier: Topic :: Multimedia :: Graphics +Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion +Requires-Python: >=3.8 +License-File: LICENSE +Provides-Extra: all +Requires-Dist: fs <3,>=2.2.0 ; extra == 'all' +Requires-Dist: lxml >=4.0 ; extra == 'all' +Requires-Dist: zopfli >=0.1.4 ; extra == 'all' +Requires-Dist: lz4 >=1.7.4.2 ; extra == 'all' +Requires-Dist: pycairo ; extra == 'all' +Requires-Dist: matplotlib ; extra == 'all' +Requires-Dist: sympy ; extra == 'all' +Requires-Dist: skia-pathops >=0.5.0 ; extra == 
'all' +Requires-Dist: uharfbuzz >=0.23.0 ; extra == 'all' +Requires-Dist: brotlicffi >=0.8.0 ; (platform_python_implementation != "CPython") and extra == 'all' +Requires-Dist: scipy ; (platform_python_implementation != "PyPy") and extra == 'all' +Requires-Dist: brotli >=1.0.1 ; (platform_python_implementation == "CPython") and extra == 'all' +Requires-Dist: munkres ; (platform_python_implementation == "PyPy") and extra == 'all' +Requires-Dist: unicodedata2 >=15.1.0 ; (python_version <= "3.12") and extra == 'all' +Requires-Dist: xattr ; (sys_platform == "darwin") and extra == 'all' +Provides-Extra: graphite +Requires-Dist: lz4 >=1.7.4.2 ; extra == 'graphite' +Provides-Extra: interpolatable +Requires-Dist: pycairo ; extra == 'interpolatable' +Requires-Dist: scipy ; (platform_python_implementation != "PyPy") and extra == 'interpolatable' +Requires-Dist: munkres ; (platform_python_implementation == "PyPy") and extra == 'interpolatable' +Provides-Extra: lxml +Requires-Dist: lxml >=4.0 ; extra == 'lxml' +Provides-Extra: pathops +Requires-Dist: skia-pathops >=0.5.0 ; extra == 'pathops' +Provides-Extra: plot +Requires-Dist: matplotlib ; extra == 'plot' +Provides-Extra: repacker +Requires-Dist: uharfbuzz >=0.23.0 ; extra == 'repacker' +Provides-Extra: symfont +Requires-Dist: sympy ; extra == 'symfont' +Provides-Extra: type1 +Requires-Dist: xattr ; (sys_platform == "darwin") and extra == 'type1' +Provides-Extra: ufo +Requires-Dist: fs <3,>=2.2.0 ; extra == 'ufo' +Provides-Extra: unicode +Requires-Dist: unicodedata2 >=15.1.0 ; (python_version <= "3.12") and extra == 'unicode' +Provides-Extra: woff +Requires-Dist: zopfli >=0.1.4 ; extra == 'woff' +Requires-Dist: brotlicffi >=0.8.0 ; (platform_python_implementation != "CPython") and extra == 'woff' +Requires-Dist: brotli >=1.0.1 ; (platform_python_implementation == "CPython") and extra == 'woff' + +|CI Build Status| |Coverage Status| |PyPI| |Gitter Chat| + +What is this? 
+~~~~~~~~~~~~~ + +| fontTools is a library for manipulating fonts, written in Python. The + project includes the TTX tool, that can convert TrueType and OpenType + fonts to and from an XML text format, which is also called TTX. It + supports TrueType, OpenType, AFM and to an extent Type 1 and some + Mac-specific formats. The project has an `MIT open-source + licence `__. +| Among other things this means you can use it free of charge. + +`User documentation `_ and +`developer documentation `_ +are available at `Read the Docs `_. + +Installation +~~~~~~~~~~~~ + +FontTools requires `Python `__ 3.8 +or later. We try to follow the same schedule of minimum Python version support as +NumPy (see `NEP 29 `__). + +The package is listed in the Python Package Index (PyPI), so you can +install it with `pip `__: + +.. code:: sh + + pip install fonttools + +If you would like to contribute to its development, you can clone the +repository from GitHub, install the package in 'editable' mode and +modify the source code in place. We recommend creating a virtual +environment, using `virtualenv `__ or +Python 3 `venv `__ module. + +.. code:: sh + + # download the source code to 'fonttools' folder + git clone https://github.com/fonttools/fonttools.git + cd fonttools + + # create new virtual environment called e.g. 'fonttools-venv', or anything you like + python -m virtualenv fonttools-venv + + # source the `activate` shell script to enter the environment (Unix-like); to exit, just type `deactivate` + . fonttools-venv/bin/activate + + # to activate the virtual environment in Windows `cmd.exe`, do + fonttools-venv\Scripts\activate.bat + + # install in 'editable' mode + pip install -e . + +Optional Requirements +--------------------- + +The ``fontTools`` package currently has no (required) external dependencies +besides the modules included in the Python Standard Library. +However, a few extra dependencies are required by some of its modules, which +are needed to unlock optional features. 
+The ``fonttools`` PyPI distribution also supports so-called "extras", i.e. a +set of keywords that describe a group of additional dependencies, which can be +used when installing via pip, or when specifying a requirement. +For example: + +.. code:: sh + + pip install fonttools[ufo,lxml,woff,unicode] + +This command will install fonttools, as well as the optional dependencies that +are required to unlock the extra features named "ufo", etc. + +- ``Lib/fontTools/misc/etree.py`` + + The module exports a ElementTree-like API for reading/writing XML files, and + allows to use as the backend either the built-in ``xml.etree`` module or + `lxml `__. The latter is preferred whenever present, + as it is generally faster and more secure. + + *Extra:* ``lxml`` + +- ``Lib/fontTools/ufoLib`` + + Package for reading and writing UFO source files; it requires: + + * `fs `__: (aka ``pyfilesystem2``) filesystem + abstraction layer. + + * `enum34 `__: backport for the built-in ``enum`` + module (only required on Python < 3.4). + + *Extra:* ``ufo`` + +- ``Lib/fontTools/ttLib/woff2.py`` + + Module to compress/decompress WOFF 2.0 web fonts; it requires: + + * `brotli `__: Python bindings of + the Brotli compression library. + + *Extra:* ``woff`` + +- ``Lib/fontTools/ttLib/sfnt.py`` + + To better compress WOFF 1.0 web fonts, the following module can be used + instead of the built-in ``zlib`` library: + + * `zopfli `__: Python bindings of + the Zopfli compression library. + + *Extra:* ``woff`` + +- ``Lib/fontTools/unicode.py`` + + To display the Unicode character names when dumping the ``cmap`` table + with ``ttx`` we use the ``unicodedata`` module in the Standard Library. + The version included in there varies between different Python versions. + To use the latest available data, you can install: + + * `unicodedata2 `__: + ``unicodedata`` backport for Python 3.x updated to the latest Unicode + version 15.0. 
+ + *Extra:* ``unicode`` + +- ``Lib/fontTools/varLib/interpolatable.py`` + + Module for finding wrong contour/component order between different masters. + It requires one of the following packages in order to solve the so-called + "minimum weight perfect matching problem in bipartite graphs", or + the Assignment problem: + + * `scipy `__: the Scientific Library + for Python, which internally uses `NumPy `__ + arrays and hence is very fast; + * `munkres `__: a pure-Python + module that implements the Hungarian or Kuhn-Munkres algorithm. + + To plot the results to a PDF or HTML format, you also need to install: + + * `pycairo `__: Python bindings for the + Cairo graphics library. Note that wheels are currently only available for + Windows, for other platforms see pycairo's `installation instructions + `__. + + *Extra:* ``interpolatable`` + +- ``Lib/fontTools/varLib/plot.py`` + + Module for visualizing DesignSpaceDocument and resulting VariationModel. + + * `matplotlib `__: 2D plotting library. + + *Extra:* ``plot`` + +- ``Lib/fontTools/misc/symfont.py`` + + Advanced module for symbolic font statistics analysis; it requires: + + * `sympy `__: the Python library for + symbolic mathematics. + + *Extra:* ``symfont`` + +- ``Lib/fontTools/t1Lib.py`` + + To get the file creator and type of Macintosh PostScript Type 1 fonts + on Python 3 you need to install the following module, as the old ``MacOS`` + module is no longer included in Mac Python: + + * `xattr `__: Python wrapper for + extended filesystem attributes (macOS platform only). + + *Extra:* ``type1`` + +- ``Lib/fontTools/ttLib/removeOverlaps.py`` + + Simplify TrueType glyphs by merging overlapping contours and components. + + * `skia-pathops `__: Python + bindings for the Skia library's PathOps module, performing boolean + operations on paths (union, intersection, etc.). 
+ + *Extra:* ``pathops`` + +- ``Lib/fontTools/pens/cocoaPen.py`` and ``Lib/fontTools/pens/quartzPen.py`` + + Pens for drawing glyphs with Cocoa ``NSBezierPath`` or ``CGPath`` require: + + * `PyObjC `__: the bridge between + Python and the Objective-C runtime (macOS platform only). + +- ``Lib/fontTools/pens/qtPen.py`` + + Pen for drawing glyphs with Qt's ``QPainterPath``, requires: + + * `PyQt5 `__: Python bindings for + the Qt cross platform UI and application toolkit. + +- ``Lib/fontTools/pens/reportLabPen.py`` + + Pen to drawing glyphs as PNG images, requires: + + * `reportlab `__: Python toolkit + for generating PDFs and graphics. + +- ``Lib/fontTools/pens/freetypePen.py`` + + Pen to drawing glyphs with FreeType as raster images, requires: + + * `freetype-py `__: Python binding + for the FreeType library. + +- ``Lib/fontTools/ttLib/tables/otBase.py`` + + Use the Harfbuzz library to serialize GPOS/GSUB using ``hb_repack`` method, requires: + + * `uharfbuzz `__: Streamlined Cython + bindings for the harfbuzz shaping engine + + *Extra:* ``repacker`` + +How to make a new release +~~~~~~~~~~~~~~~~~~~~~~~~~ + +1) Update ``NEWS.rst`` with all the changes since the last release. Write a + changelog entry for each PR, with one or two short sentences summarizing it, + as well as links to the PR and relevant issues addressed by the PR. Do not + put a new title, the next command will do it for you. +2) Use semantic versioning to decide whether the new release will be a 'major', + 'minor' or 'patch' release. It's usually one of the latter two, depending on + whether new backward compatible APIs were added, or simply some bugs were fixed. +3) From inside a venv, first do ``pip install -r dev-requirements.txt``, then run + the ``python setup.py release`` command from the tip of the ``main`` branch. + By default this bumps the third or 'patch' digit only, unless you pass ``--major`` + or ``--minor`` to bump respectively the first or second digit. 
+ This bumps the package version string, extracts the changes since the latest + version from ``NEWS.rst``, and uses that text to create an annotated git tag + (or a signed git tag if you pass the ``--sign`` option and your git and Github + account are configured for `signing commits `__ + using a GPG key). + It also commits an additional version bump which opens the main branch for + the subsequent developmental cycle +4) Push both the tag and commit to the upstream repository, by running the command + ``git push --follow-tags``. Note: it may push other local tags as well, be + careful. +5) Let the CI build the wheel and source distribution packages and verify both + get uploaded to the Python Package Index (PyPI). +6) [Optional] Go to fonttools `Github Releases `__ + page and create a new release, copy-pasting the content of the git tag + message. This way, the release notes are nicely formatted as markdown, and + users watching the repo will get an email notification. One day we shall + automate that too. + + +Acknowledgements +~~~~~~~~~~~~~~~~ + +In alphabetical order: + +aschmitz, Olivier Berten, Samyak Bhuta, Erik van Blokland, Petr van Blokland, +Jelle Bosma, Sascha Brawer, Tom Byrer, Antonio Cavedoni, Frédéric Coiffier, +Vincent Connare, David Corbett, Simon Cozens, Dave Crossland, Simon Daniels, +Peter Dekkers, Behdad Esfahbod, Behnam Esfahbod, Hannes Famira, Sam Fishman, +Matt Fontaine, Takaaki Fuji, Rob Hagemans, Yannis Haralambous, Greg Hitchcock, +Jeremie Hornus, Khaled Hosny, John Hudson, Denis Moyogo Jacquerye, Jack Jansen, +Tom Kacvinsky, Jens Kutilek, Antoine Leca, Werner Lemberg, Tal Leming, Peter +Lofting, Cosimo Lupo, Olli Meier, Masaya Nakamura, Dave Opstad, Laurence Penney, +Roozbeh Pournader, Garret Rieger, Read Roberts, Colin Rofls, Guido van Rossum, +Just van Rossum, Andreas Seidel, Georg Seifert, Chris Simpkins, Miguel Sousa, +Adam Twardoch, Adrien Tétar, Vitaly Volkov, Paul Wise. 
+ +Copyrights +~~~~~~~~~~ + +| Copyright (c) 1999-2004 Just van Rossum, LettError + (just@letterror.com) +| See `LICENSE `__ for the full license. + +Copyright (c) 2000 BeOpen.com. All Rights Reserved. + +Copyright (c) 1995-2001 Corporation for National Research Initiatives. +All Rights Reserved. + +Copyright (c) 1991-1995 Stichting Mathematisch Centrum, Amsterdam. All +Rights Reserved. + +Have fun! + +.. |CI Build Status| image:: https://github.com/fonttools/fonttools/workflows/Test/badge.svg + :target: https://github.com/fonttools/fonttools/actions?query=workflow%3ATest +.. |Coverage Status| image:: https://codecov.io/gh/fonttools/fonttools/branch/main/graph/badge.svg + :target: https://codecov.io/gh/fonttools/fonttools +.. |PyPI| image:: https://img.shields.io/pypi/v/fonttools.svg + :target: https://pypi.org/project/FontTools +.. |Gitter Chat| image:: https://badges.gitter.im/fonttools-dev/Lobby.svg + :alt: Join the chat at https://gitter.im/fonttools-dev/Lobby + :target: https://gitter.im/fonttools-dev/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge + +Changelog +~~~~~~~~~ + +4.54.1 (released 2024-09-24) +---------------------------- + +- [unicodedata] Update to Unicode 16 +- [subset] Escape ``\\`` in doc string + +4.54.0 (released 2024-09-23) +---------------------------- + +- [Docs] Small docs cleanups by @n8willis (#3611) +- [Docs] cleanup code blocks by @n8willis (#3627) +- [Docs] fix Sphinx builds by @n8willis (#3625) +- [merge] Minor fixes to documentation for merge by @drj11 (#3588) +- [subset] Small tweaks to pyftsubset documentation by @RoelN (#3633) +- [Tests] Do not require fonttools command to be available by @behdad (#3612) +- [Tests] subset_test: add failing test to reproduce issue #3616 by @anthrotype (#3622) +- [ttLib] NameRecordVisitor: include whole sequence of character variants' UI labels, not just the first by @anthrotype (#3617) +- [varLib.avar] Reconstruct mappings from binary by @behdad (#3598) +- 
[varLib.instancer] Fix visual artefacts with partial L2 instancing by @Hoolean (#3635) +- [varLib.interpolatable] Support discrete axes in .designspace by @behdad (#3599) +- [varLib.models] By default, assume OpenType-like normalized space by @behdad (#3601) + +4.53.1 (released 2024-07-05) +---------------------------- + +- [feaLib] Improve the sharing of inline chained lookups (#3559) +- [otlLib] Correct the calculation of OS/2.usMaxContext with reversed chaining contextual single substitutions (#3569) +- [misc.visitor] Visitors search the inheritance chain of objects they are visiting (#3581) + +4.53.0 (released 2024-05-31) +---------------------------- + +- [ttLib.removeOverlaps] Support CFF table to aid in downconverting CFF2 fonts (#3528) +- [avar] Fix crash when accessing not-yet-existing attribute (#3550) +- [docs] Add buildMathTable to otlLib.builder documentation (#3540) +- [feaLib] Allow UTF-8 with BOM when reading features (#3495) +- [SVGPathPen] Revert rounding coordinates to two decimal places by default (#3543) +- [varLib.instancer] Refix output filename decision-making (#3545, #3544, #3548) + +4.52.4 (released 2024-05-27) +---------------------------- + +- [varLib.cff] Restore and deprecate convertCFFtoCFF2 that was removed in 4.52.0 + release as it is used by downstream projects (#3535). + +4.52.3 (released 2024-05-27) +---------------------------- + +- Fixed a small syntax error in the reStructuredText-formatted NEWS.rst file + which caused the upload to PyPI to fail for 4.52.2. No other code changes. + +4.52.2 (released 2024-05-27) +---------------------------- + +- [varLib.interpolatable] Ensure that scipy/numpy output is JSON-serializable + (#3522, #3526). +- [housekeeping] Regenerate table lists, to fix pyinstaller packaging of the new + ``VARC`` table (#3531, #3529). +- [cffLib] Make CFFToCFF2 and CFF2ToCFF more robust (#3521, #3525). 
+ +4.52.1 (released 2024-05-24) +---------------------------- + +- Fixed a small syntax error in the reStructuredText-formatted NEWS.rst file + which caused the upload to PyPI to fail for 4.52.0. No other code changes. + +4.52.0 (released 2024-05-24) +---------------------------- + +- Added support for the new ``VARC`` (Variable Composite) table that is being + proposed to OpenType spec (#3395). For more info: + https://github.com/harfbuzz/boring-expansion-spec/blob/main/VARC.md +- [ttLib.__main__] Fixed decompiling all tables (90fed08). +- [feaLib] Don't reference the same lookup index multiple times within the same + feature record, it is only applied once anyway (#3520). +- [cffLib] Moved methods to desubroutinize, remove hints and unused subroutines + from subset module to cffLib (#3517). +- [varLib.instancer] Added support for partial-instancing CFF2 tables! Also, added + method to down-convert from CFF2 to CFF 1.0, and CLI entry points to convert + CFF<->CFF2 (#3506). +- [subset] Prune unused user name IDs even with --name-IDs='*' (#3410). +- [ttx] use GNU-style getopt to intermix options and positional arguments (#3509). +- [feaLib.variableScalar] Fixed ``value_at_location()`` method (#3491) +- [psCharStrings] Shorten output of ``encodeFloat`` (#3492). +- [bezierTools] Fix infinite-recursion in ``calcCubicArcLength`` (#3502). +- [avar2] Implement ``avar2`` support in ``TTFont.getGlyphSet()`` (#3473). + +4.51.0 (released 2024-04-05) +---------------------------- + +- [ttLib] Optimization on loading aux fields (#3464). +- [ttFont] Add reorderGlyphs (#3468). + +4.50.0 (released 2024-03-15) +---------------------------- + +- [pens] Added decomposing filter pens that draw components as regular contours (#3460). +- [instancer] Drop explicit no-op axes from TupleVariations (#3457). +- [cu2qu/ufo] Return set of modified glyph names from fonts_to_quadratic (#3456). 
+ +4.49.0 (released 2024-02-15) +---------------------------- + +- [otlLib] Add API for building ``MATH`` table (#3446) + +4.48.1 (released 2024-02-06) +---------------------------- + +- Fixed uploading wheels to PyPI, no code changes since v4.48.0. + +4.48.0 (released 2024-02-06) +---------------------------- + +- [varLib] Do not log when there are no OTL tables to be merged. +- [setup.py] Do not restrict lxml<5 any more, tests pass just fine with lxml>=5. +- [feaLib] Remove glyph and class names length restrictions in FEA (#3424). +- [roundingPens] Added ``transformRoundFunc`` parameter to the rounding pens to allow + for custom rounding of the components' transforms (#3426). +- [feaLib] Keep declaration order of ligature components within a ligature set, instead + of sorting by glyph name (#3429). +- [feaLib] Fixed ordering of alternates in ``aalt`` lookups, following the declaration + order of feature references within the ``aalt`` feature block (#3430). +- [varLib.instancer] Fixed a bug in the instancer's IUP optimization (#3432). +- [sbix] Support sbix glyphs with new graphicType "flip" (#3433). +- [svgPathPen] Added ``--glyphs`` option to dump the SVG paths for the named glyphs + in the font (0572f78). +- [designspaceLib] Added "description" attribute to ```` and ```` + elements, and allow multiple ```` elements to group ```` elements + that are logically related (#3435, #3437). +- [otlLib] Correctly choose the most compact GSUB contextual lookup format (#3439). + +4.47.2 (released 2024-01-11) +---------------------------- + +Minor release to fix uploading wheels to PyPI. 
+ +4.47.1 (released 2024-01-11) +---------------------------- + +- [merge] Improve help message and add standard command line options (#3408) +- [otlLib] Pass ``ttFont`` to ``name.addName`` in ``buildStatTable`` (#3406) +- [featureVars] Re-use ``FeatureVariationRecord``'s when possible (#3413) + +4.47.0 (released 2023-12-18) +---------------------------- + +- [varLib.models] New API for VariationModel: ``getMasterScalars`` and + ``interpolateFromValuesAndScalars``. +- [varLib.interpolatable] Various bugfixes and rendering improvements. In particular, + add a Summary page in the front, and an Index and Table-of-Contents in the back. + Change the page size to Letter. +- [Docs/designspaceLib] Defined a new ``public.fontInfo`` lib key, not used anywhere yet (#3358). + +4.46.0 (released 2023-12-02) +---------------------------- + +- [featureVars] Allow to register the same set of substitution rules to multiple features. + The ``addFeatureVariations`` function can now take a list of featureTags; similarly, the + lib key 'com.github.fonttools.varLib.featureVarsFeatureTag' can now take a + comma-separateed string of feature tags (e.g. "salt,ss01") instead of a single tag (#3360). +- [featureVars] Don't overwrite GSUB FeatureVariations, but append new records to it + for features which are not already there. But raise ``VarLibError`` if the feature tag + already has feature variations associated with it (#3363). +- [varLib] Added ``addGSUBFeatureVariations`` function to add GSUB Feature Variations + to an existing variable font from rules defined in a DesignSpace document (#3362). +- [varLib.interpolatable] Various bugfixes and rendering improvements. In particular, + a new test for "underweight" glyphs. The new test reports quite a few false-positives + though. Please send feedback. + +4.45.1 (released 2023-11-23) +---------------------------- + +- [varLib.interpolatable] Various bugfixes and improvements, better reporting, reduced + false positives. 
+- [ttGlyphSet] Added option to not recalculate glyf bounds (#3348). + +4.45.0 (released 2023-11-20) +---------------------------- + +- [varLib.interpolatable] Vastly improved algorithms. Also available now is ``--pdf`` + and ``--html`` options to generate a PDF or HTML report of the interpolation issues. + The PDF/HTML report showcases the problematic masters, the interpolated broken + glyph, as well as the proposed fixed version. + +4.44.3 (released 2023-11-15) +---------------------------- + +- [subset] Only prune codepage ranges for OS/2.version >= 1, ignore otherwise (#3334). +- [instancer] Ensure hhea vertical metrics stay in sync with OS/2 ones after instancing + MVAR table containing 'hasc', 'hdsc' or 'hlgp' tags (#3297). + +4.44.2 (released 2023-11-14) +---------------------------- + +- [glyf] Have ``Glyph.recalcBounds`` skip empty components (base glyph with no contours) + when computing the bounding box of composite glyphs. This simply restores the existing + behavior before some changes were introduced in fonttools 4.44.0 (#3333). + +4.44.1 (released 2023-11-14) +---------------------------- + +- [feaLib] Ensure variable mark anchors are deep-copied while building since they + get modified in-place and later reused (#3330). +- [OS/2|subset] Added method to ``recalcCodePageRanges`` to OS/2 table class; added + ``--prune-codepage-ranges`` to `fonttools subset` command (#3328, #2607). + +4.44.0 (released 2023-11-03) +---------------------------- + +- [instancer] Recalc OS/2 AvgCharWidth after instancing if default changes (#3317). +- [otlLib] Make ClassDefBuilder class order match varLib.merger's, i.e. large + classes first, then glyph lexicographic order (#3321, #3324). +- [instancer] Allow not specifying any of min:default:max values and let be filled + up with fvar's values (#3322, #3323). +- [instancer] When running --update-name-table ignore axes that have no STAT axis + values (#3318, #3319). 
+- [Debg] When dumping to ttx, write the embedded JSON as multi-line string with + indentation (92cbfee0d). +- [varStore] Handle > 65535 items per encoding by splitting VarData subtable (#3310). +- [subset] Handle null-offsets in MarkLigPos subtables. +- [subset] Keep East Asian spacing fatures vhal, halt, chws, vchw by default (#3305). +- [instancer.solver] Fixed case where axisDef < lower and upper < axisMax (#3304). +- [glyf] Speed up compilation, mostly around ``recalcBounds`` (#3301). +- [varLib.interpolatable] Speed it up when working on variable fonts, plus various + micro-optimizations (#3300). +- Require unicodedata2 >= 15.1.0 when installed with 'unicode' extra, contains UCD 15.1. + +4.43.1 (released 2023-10-06) +---------------------------- + +- [EBDT] Fixed TypeError exception in `_reverseBytes` method triggered when dumping + some bitmap fonts with `ttx -z bitwise` option (#3162). +- [v/hhea] Fixed UnboundLocalError exception in ``recalc`` method when no vmtx or hmtx + tables are present (#3290). +- [bezierTools] Fixed incorrectly typed cython local variable leading to TypeError when + calling ``calcQuadraticArcLength`` (#3288). +- [feaLib/otlLib] Better error message when building Coverage table with missing glyph (#3286). + +4.43.0 (released 2023-09-29) +---------------------------- + +- [subset] Set up lxml ``XMLParser(resolve_entities=False)`` when parsing OT-SVG documents + to prevent XML External Entity (XXE) attacks (9f61271dc): + https://codeql.github.com/codeql-query-help/python/py-xxe/ +- [varLib.iup] Added workaround for a Cython bug in ``iup_delta_optimize`` that was + leading to IUP tolerance being incorrectly initialised, resulting in sub-optimal deltas + (60126435d, cython/cython#5732). +- [varLib] Added new command-line entry point ``fonttools varLib.avar`` to add an + ``avar`` table to an existing VF from axes mappings in a .designspace file (0a3360e52). 
+- [instancer] Fixed bug whereby no longer used variation regions were not correctly pruned + after VarData optimization (#3268). +- Added support for Python 3.12 (#3283). + +4.42.1 (released 2023-08-20) +---------------------------- + +- [t1Lib] Fixed several Type 1 issues (#3238, #3240). +- [otBase/packer] Allow sharing tables reached by different offset sizes (#3241, #3236). +- [varLib/merger] Fix Cursive attachment merging error when all anchors are NULL (#3248, #3247). +- [ttLib] Fixed warning when calling ``addMultilingualName`` and ``ttFont`` parameter was not + passed on to ``findMultilingualName`` (#3253). + +4.42.0 (released 2023-08-02) +---------------------------- + +- [varLib] Use sentinel value 0xFFFF to mark a glyph advance in hmtx/vmtx as non + participating, allowing sparse masters to contain glyphs for variation purposes other + than {H,V}VAR (#3235). +- [varLib/cff] Treat empty glyphs in non-default masters as missing, thus not participating + in CFF2 delta computation, similarly to how varLib already treats them for gvar (#3234). +- Added varLib.avarPlanner script to deduce 'correct' avar v1 axis mappings based on + glyph average weights (#3223). + +4.41.1 (released 2023-07-21) +---------------------------- + +- [subset] Fixed perf regression in v4.41.0 by making ``NameRecordVisitor`` only visit + tables that do contain nameID references (#3213, #3214). +- [varLib.instancer] Support instancing fonts containing null ConditionSet offsets in + FeatureVariationRecords (#3211, #3212). +- [statisticsPen] Report font glyph-average weight/width and font-wide slant. +- [fontBuilder] Fixed head.created date incorrectly set to 0 instead of the current + timestamp, regression introduced in v4.40.0 (#3210). +- [varLib.merger] Support sparse ``CursivePos`` masters (#3209). 
+
+4.41.0 (released 2023-07-12)
+----------------------------
+
+- [fontBuilder] Fixed bug in setupOS2 with default panose attribute incorrectly being
+  set to a dict instead of a Panose object (#3201).
+- [name] Added method to ``removeUnusedNameRecords`` in the user range (#3185).
+- [varLib.instancer] Fixed issue with L4 instancing (moving default) (#3179).
+- [cffLib] Use latin1 so we can roundtrip non-ASCII in {Full,Font,Family}Name (#3202).
+- [designspaceLib] Mark <lib> as optional in docs (as it is in the code).
+- [glyf-1] Fixed drawPoints() bug whereby last cubic segment becomes quadratic (#3189, #3190).
+- [fontBuilder] Propagate the 'hidden' flag to the fvar Axis instance (#3184).
+- [fontBuilder] Update setupAvar() to also support avar 2, fixing ``_add_avar()`` call
+  site (#3183).
+- Added new ``voltLib.voltToFea`` submodule (originally Tiro Typeworks' "Volto") for
+  converting VOLT OpenType Layout sources to FEA format (#3164).
+
+4.40.0 (released 2023-06-12)
+----------------------------
+
+- Published native binary wheels to PyPI for all the python minor versions and platform
+  and architectures currently supported that would benefit from this. They will include
+  precompiled Cython-accelerated modules (e.g. cu2qu) without requiring to compile them
+  from source. The pure-python wheel and source distribution will continue to be
+  published as always (pip will automatically choose them when no binary wheel is
+  available for the given platform, e.g. pypy). Use ``pip install --no-binary=fonttools fonttools``
+  to explicitly request pip to install from the pure-python source.
+- [designspaceLib|varLib] Add initial support for specifying axis mappings and build
+  ``avar2`` table from those (#3123).
+- [feaLib] Support variable ligature caret position (#3130).
+- [varLib|glyf] Added option to --drop-implied-oncurves; test for impliable oncurve
+  points either before or after rounding (#3146, #3147, #3155, #3156).
+- [TTGlyphPointPen] Don't error with empty contours, simply ignore them (#3145). +- [sfnt] Fixed str vs bytes remnant of py3 transition in code dealing with de/compiling + WOFF metadata (#3129). +- [instancer-solver] Fixed bug when moving default instance with sparse masters (#3139, #3140). +- [feaLib] Simplify variable scalars that don’t vary (#3132). +- [pens] Added filter pen that explicitly emits closing line when lastPt != movePt (#3100). +- [varStore] Improve optimize algorithm and better document the algorithm (#3124, #3127). + Added ``quantization`` option (#3126). +- Added CI workflow config file for building native binary wheels (#3121). +- [fontBuilder] Added glyphDataFormat=0 option; raise error when glyphs contain cubic + outlines but glyphDataFormat was not explicitly set to 1 (#3113, #3119). +- [subset] Prune emptied GDEF.MarkGlyphSetsDef and remap indices; ensure GDEF is + subsetted before GSUB and GPOS (#3114, #3118). +- [xmlReader] Fixed issue whereby DSIG table data was incorrectly parsed (#3115, #2614). +- [varLib/merger] Fixed merging of SinglePos with pos=0 (#3111, #3112). +- [feaLib] Demote "Feature has not been defined" error to a warning when building aalt + and referenced feature is empty (#3110). +- [feaLib] Dedupe multiple substitutions with classes (#3105). 
+ +4.39.4 (released 2023-05-10) +---------------------------- + +- [varLib.interpolatable] Allow for sparse masters (#3075) +- [merge] Handle differing default/nominalWidthX in CFF (#3070) +- [ttLib] Add missing main.py file to ttLib package (#3088) +- [ttx] Fix missing composite instructions in XML (#3092) +- [ttx] Fix split tables option to work on filenames containing '%' (#3096) +- [featureVars] Process lookups for features other than rvrn last (#3099) +- [feaLib] support multiple substitution with classes (#3103) + +4.39.3 (released 2023-03-28) +---------------------------- + +- [sbix] Fixed TypeError when compiling empty glyphs whose imageData is None, regression + was introduced in v4.39 (#3059). +- [ttFont] Fixed AttributeError on python <= 3.10 when opening a TTFont from a tempfile + SpooledTemporaryFile, seekable method only added on python 3.11 (#3052). + +4.39.2 (released 2023-03-16) +---------------------------- + +- [varLib] Fixed regression introduced in 4.39.1 whereby an incomplete 'STAT' table + would be built even though a DesignSpace v5 did contain 'STAT' definitions (#3045, #3046). + +4.39.1 (released 2023-03-16) +---------------------------- + +- [avar2] Added experimental support for reading/writing avar version 2 as specified in + this draft proposal: https://github.com/harfbuzz/boring-expansion-spec/blob/main/avar2.md +- [glifLib] Wrap underlying XML library exceptions with GlifLibError when parsing GLIFs, + and also print the name and path of the glyph that fails to be parsed (#3042). +- [feaLib] Consult avar for normalizing user-space values in ConditionSets and in + VariableScalars (#3042, #3043). +- [ttProgram] Handle string input to Program.fromAssembly() (#3038). +- [otlLib] Added a config option to emit GPOS 7 lookups, currently disabled by default + because of a macOS bug (#3034). +- [COLRv1] Added method to automatically compute ClipBoxes (#3027). 
+- [ttFont] Fixed getGlyphID to raise KeyError on missing glyphs instead of returning + None. The regression was introduced in v4.27.0 (#3032). +- [sbix] Fixed UnboundLocalError: cannot access local variable 'rawdata' (#3031). +- [varLib] When building VF, do not overwrite a pre-existing ``STAT`` table that was built + with feaLib from FEA feature file. Also, added support for building multiple VFs + defined in Designspace v5 from ``fonttools varLib`` script (#3024). +- [mtiLib] Only add ``Debg`` table with lookup names when ``FONTTOOLS_LOOKUP_DEBUGGING`` + env variable is set (#3023). + +4.39.0 (released 2023-03-06) +---------------------------- + +- [mtiLib] Optionally add `Debg` debug info for MTI feature builds (#3018). +- [ttx] Support reading input file from standard input using special `-` character, + similar to existing `-o -` option to write output to standard output (#3020). +- [cython] Prevent ``cython.compiled`` raise AttributeError if cython not installed + properly (#3017). +- [OS/2] Guard against ZeroDivisionError when calculating xAvgCharWidth in the unlikely + scenario no glyph has non-zero advance (#3015). +- [subset] Recompute xAvgCharWidth independently of --no-prune-unicode-ranges, + previously the two options were involuntarily bundled together (#3012). +- [fontBuilder] Add ``debug`` parameter to addOpenTypeFeatures method to add source + debugging information to the font in the ``Debg`` private table (#3008). +- [name] Make NameRecord `__lt__` comparison not fail on Unicode encoding errors (#3006). +- [featureVars] Fixed bug in ``overlayBox`` (#3003, #3005). +- [glyf] Added experimental support for cubic bezier curves in TrueType glyf table, as + outlined in glyf v1 proposal (#2988): + https://github.com/harfbuzz/boring-expansion-spec/blob/main/glyf1-cubicOutlines.md +- Added new qu2cu module and related qu2cuPen, the reverse of cu2qu for converting + TrueType quadratic splines to cubic bezier curves (#2993). 
+- [glyf] Added experimental support for reading and writing Variable Composites/Components + as defined in glyf v1 spec proposal (#2958): + https://github.com/harfbuzz/boring-expansion-spec/blob/main/glyf1-varComposites.md. +- [pens]: Added `addVarComponent` method to pen protocols' base classes, which pens can implement + to handle varcomponents (by default they get decomposed) (#2958). +- [misc.transform] Added DecomposedTransform class which implements an affine transformation + with separate translate, rotation, scale, skew, and transformation-center components (#2598) +- [sbix] Ensure Glyph.referenceGlyphName is set; fixes error after dumping and + re-compiling sbix table with 'dupe' glyphs (#2984). +- [feaLib] Be cleverer when merging chained single substitutions into same lookup + when they are specified using the inline notation (#2150, #2974). +- [instancer] Clamp user-inputted axis ranges to those of fvar (#2959). +- [otBase/subset] Define ``__getstate__`` for BaseTable so that a copied/pickled 'lazy' + object gets its own OTTableReader to read from; incidentally fixes a bug while + subsetting COLRv1 table containing ClipBoxes on python 3.11 (#2965, #2968). +- [sbix] Handle glyphs with "dupe" graphic type on compile correctly (#2963). +- [glyf] ``endPointsOfContours`` field should be unsigned! Kudos to behdad for + spotting one of the oldest bugs in FT. Probably nobody has ever dared to make + glyphs with more than 32767 points... (#2957). +- [feaLib] Fixed handling of ``ignore`` statements with unmarked glyphs to match + makeotf behavior, which assumes the first glyph is marked (#2950). +- Reformatted code with ``black`` and enforce new code style via CI check (#2925). +- [feaLib] Sort name table entries following OT spec prescribed order in the builder (#2927). 
+- [cu2quPen] Add Cu2QuMultiPen that converts multiple outlines at a time in + interpolation compatible way; its methods take a list of tuples arguments + that would normally be passed to individual segment pens, and at the end it + dispatches the converted outlines to each pen (#2912). +- [reverseContourPen/ttGlyphPen] Add outputImpliedClosingLine option (#2913, #2914, + #2921, #2922, #2995). +- [gvar] Avoid expanding all glyphs unnecessarily upon compile (#2918). +- [scaleUpem] Fixed bug whereby CFF2 vsindex was scaled; it should not (#2893, #2894). +- [designspaceLib] Add DS.getAxisByTag and refactor getAxis (#2891). +- [unicodedata] map Zmth<->math in ot_tag_{to,from}_script (#1737, #2889). +- [woff2] Support encoding/decoding OVERLAP_SIMPLE glyf flags (#2576, #2884). +- [instancer] Update OS/2 class and post.italicAngle when default moved (L4) +- Dropped support for Python 3.7 which reached EOL, fontTools requires 3.8+. +- [instancer] Fixed instantiateFeatureVariations logic when a rule range becomes + default-applicable (#2737, #2880). +- [ttLib] Add main to ttFont and ttCollection that just decompile and re-compile the + input font (#2869). +- [featureVars] Insert 'rvrn' lookup at the beginning of LookupList, to work around bug + in Apple implementation of 'rvrn' feature which the spec says it should be processed + early whereas on macOS 10.15 it follows lookup order (#2140, #2867). +- [instancer/mutator] Remove 'DSIG' table if present. +- [svgPathPen] Don't close path in endPath(), assume open unless closePath() (#2089, #2865). + +4.38.0 (released 2022-10-21) +---------------------------- + +- [varLib.instancer] Added support for L4 instancing, i.e. moving the default value of + an axis while keeping it variable. Thanks Behdad! (#2728, #2861). + It's now also possible to restrict an axis min/max values beyond the current default + value, e.g. 
a font wght has min=100, def=400, max=900 and you want a partial VF that + only varies between 500 and 700, you can now do that. + You can either specify two min/max values (wght=500:700), and the new default will be + set to either the minimum or maximum, depending on which one is closer to the current + default (e.g. 500 in this case). Or you can specify three values (e.g. wght=500:600:700) + to specify the new default value explicitly. +- [otlLib/featureVars] Set a few Count values so one doesn't need to compile the font + to update them (#2860). +- [varLib.models] Make extrapolation work for 2-master models as well where one master + is at the default location (#2843, #2846). + Add optional extrapolate=False to normalizeLocation() (#2847, #2849). +- [varLib.cff] Fixed sub-optimal packing of CFF2 deltas by no longer rounding them to + integer (#2838). +- [scaleUpem] Calculate numShorts in VarData after scale; handle CFF hintmasks (#2840). + +4.37.4 (released 2022-09-30) +---------------------------- + +- [subset] Keep nameIDs used by CPAL palette entry labels (#2837). +- [varLib] Avoid negative hmtx values when creating font from variable CFF2 font (#2827). +- [instancer] Don't prune stat.ElidedFallbackNameID (#2828). +- [unicodedata] Update Scripts/Blocks to Unicode 15.0 (#2833). + +4.37.3 (released 2022-09-20) +---------------------------- + +- Fix arguments in calls to (glyf) glyph.draw() and drawPoints(), whereby offset wasn't + correctly passed down; this fix also exposed a second bug, where lsb and tsb were not + set (#2824, #2825, adobe-type-tools/afdko#1560). + +4.37.2 (released 2022-09-15) +---------------------------- + +- [subset] Keep CPAL table and don't attempt to prune unused color indices if OT-SVG + table is present even if COLR table was subsetted away; OT-SVG may be referencing the + CPAL table; for now we assume that's the case (#2814, #2815). 
+- [varLib.instancer] Downgrade GPOS/GSUB version if there are no more FeatureVariations
+  after instancing (#2812).
+- [subset] Added ``--no-lazy`` to optionally load fonts eagerly (mostly to ease
+  debugging of table lazy loading, no practical effects) (#2807).
+- [varLib] Avoid building empty COLR.DeltaSetIndexMap with only identity mappings (#2803).
+- [feaLib] Allow multiple value record types (by promoting to the most general format)
+  within the same PairPos subtable; e.g. this allows variable and non variable kerning
+  rules to share the same subtable. This also fixes a bug whereby some kerning pairs
+  would become unreachable while shaping because of premature subtable splitting (#2772, #2776).
+- [feaLib] Speed up ``VarScalar`` by caching models for recurring master locations (#2798).
+- [feaLib] Optionally cythonize ``feaLib.lexer``, speeds up parsing FEA a bit (#2799).
+- [designspaceLib] Avoid crash when handling unbounded rule conditions (#2797).
+- [post] Don't crash if ``post`` legacy format 1 is malformed/improperly used (#2786)
+- [gvar] Don't be "lazy" (load all glyph variations up front) when TTFont.lazy=False (#2771).
+- [TTFont] Added ``normalizeLocation`` method to normalize a location dict from the
+  font's defined axes space (also known as "user space") into the normalized (-1..+1)
+  space. It applies ``avar`` mapping if the font contains an ``avar`` table (#2789).
+- [TTVarGlyphSet] Support drawing glyph instances from CFF2 variable glyph set (#2784).
+- [fontBuilder] Do not error when building cmap if there are zero code points (#2785).
+- [varLib.plot] Added ability to plot a variation model and set of accompanying master
+  values corresponding to the model's master locations into a pyplot figure (#2767).
+- [Snippets] Added ``statShape.py`` script to draw statistical shape of a glyph as an
+  ellipse (requires pycairo) (baecd88).
+- [TTVarGlyphSet] implement drawPoints natively, avoiding going through
+  SegmentToPointPen (#2778).
+- [TTVarGlyphSet] Fixed bug whereby drawing a composite glyph multiple times, its
+  components would shift; needed an extra copy (#2774).
+
+4.37.1 (released 2022-08-24)
+----------------------------
+
+- [subset] Fixed regression introduced with v4.37.0 while subsetting the VarStore of
+  ``HVAR`` and ``VVAR`` tables, whereby an ``AttributeError: subset_varidxes`` was
+  thrown because an apparently unused import statement (with the side-effect of
+  dynamically binding that ``subset_varidxes`` method to the VarStore class) had been
+  accidentally deleted in an unrelated PR (#2679, #2773).
+- [pens] Added ``cairoPen`` (#2678).
+- [gvar] Read ``gvar`` more lazily by not parsing all of the ``glyf`` table (#2771).
+- [ttGlyphSet] Make ``drawPoints(pointPen)`` method work for CFF fonts as well via
+  adapter pen (#2770).
+
+4.37.0 (released 2022-08-23)
+----------------------------
+
+- [varLib.models] Reverted PR #2717 which added support for "narrow tents" in v4.36.0,
+  as it introduced a regression (#2764, #2765). It will be restored in upcoming release
+  once we found a solution to the bug.
+- [cff.specializer] Fixed issue in charstring generalizer with the ``blend`` operator
+  (#2750, #1975).
+- [varLib.models] Added support for extrapolation (#2757).
+- [ttGlyphSet] Ensure the newly added ``_TTVarGlyphSet`` inherits from ``_TTGlyphSet``
+  to keep backward compatibility with existing API (#2762).
+- [kern] Allow compiling legacy kern tables with more than 64k entries (d21cfdede).
+- [visitor] Added new visitor API to traverse tree of objects and dispatch based
+  on the attribute type: cf. ``fontTools.misc.visitor`` and ``fontTools.ttLib.ttVisitor``. Added ``fontTools.ttLib.scaleUpem`` module that uses the latter to
+  change a font's units-per-em and scale all the related fields accordingly (#2718,
+  #2755).
+
+4.36.0 (released 2022-08-17)
+----------------------------
+
+- [varLib.models] Use a simpler model that generates narrower "tents" (regions, master
+  supports) whenever possible: specifically when any two axes that actively "cooperate"
+  (have masters at non-zero positions for both axes) have a complete set of intermediates.
+  The simpler algorithm produces fewer overlapping regions and behaves better with
+  respect to rounding at the peak positions than the generic solver, always matching
+  intermediate masters exactly, instead of maximally 0.5 units off. This may be useful
+  when 100% metrics compatibility is desired (#2218, #2717).
+- [feaLib] Remove warning about ``GDEF`` not being built when explicitly not
+  requested; don't build one unconditionally even when not requested (#2744, also works
+  around #2747).
+- [ttFont] ``TTFont.getGlyphSet`` method now supports selecting a location that
+  represents an instance of a variable font (supports both user-scale and normalized
+  axes coordinates via the ``normalized=False`` parameter). Currently this only works
+  for TrueType-flavored variable fonts (#2738).
+
+4.35.0 (released 2022-08-15)
+----------------------------
+
+- [otData/otConverters] Added support for 'biased' PaintSweepGradient start/end angles
+  to match latest COLRv1 spec (#2743).
+- [varLib.instancer] Fixed bug in ``_instantiateFeatureVariations`` when at the same
+  time pinning one axis and restricting the range of a subsequent axis; the wrong axis
+  tag was being used in the latter step (as the records' axisIdx was updated in the
+  preceding step but looked up using the old axes order in the following step) (#2733,
+  #2734).
+- [mtiLib] Pad script tags with space when less than 4 char long (#1727).
+- [merge] Use ``'.'`` instead of ``'#'`` in duplicate glyph names (#2742).
+- [gvar] Added support for lazily loading glyph variations (#2741).
+- [varLib] In ``build_many``, we forgot to pass on ``colr_layer_reuse`` parameter to + the ``build`` method (#2730). +- [svgPathPen] Add a main that prints SVG for input text (6df779fd). +- [cffLib.width] Fixed off-by-one in optimized values; previous code didn't match the + code block above it (2963fa50). +- [varLib.interpolatable] Support reading .designspace and .glyphs files (via optional + ``glyphsLib``). +- Compile some modules with Cython when available and building/installing fonttools + from source: ``varLib.iup`` (35% faster), ``pens.momentsPen`` (makes + ``varLib.interpolatable`` 3x faster). +- [feaLib] Allow features to be built for VF without also building a GDEF table (e.g. + only build GSUB); warn when GDEF would be needed but isn't requested (#2705, 2694). +- [otBase] Fixed ``AttributeError`` when uharfbuzz < 0.23.0 and 'repack' method is + missing (32aa8eaf). Use new ``uharfbuzz.repack_with_tag`` when available (since + uharfbuzz>=0.30.0), enables table-specific optimizations to be performed during + repacking (#2724). +- [statisticsPen] By default report all glyphs (4139d891). Avoid division-by-zero + (52b28f90). +- [feaLib] Added missing required argument to FeatureLibError exception (#2693) +- [varLib.merge] Fixed error during error reporting (#2689). Fixed undefined + ``NotANone`` variable (#2714). + +4.34.4 (released 2022-07-07) +---------------------------- + +- Fixed typo in varLib/merger.py that causes NameError merging COLR glyphs + containing more than 255 layers (#2685). + +4.34.3 (released 2022-07-07) +---------------------------- + +- [designspaceLib] Don't make up bad PS names when no STAT data (#2684) + +4.34.2 (released 2022-07-06) +---------------------------- + +- [varStore/subset] fixed KeyError exception to do with NO_VARIATION_INDEX while + subsetting varidxes in GPOS/GDEF (a08140d). 
+
+4.34.1 (released 2022-07-06)
+----------------------------
+
+- [instancer] When optimizing HVAR/VVAR VarStore, use_NO_VARIATION_INDEX=False to avoid
+  including NO_VARIATION_INDEX in AdvWidthMap, RsbMap, LsbMap mappings, which would
+  push the VarIdx width to maximum (4bytes), which is not desirable. This also fixes
+  a hard crash when attempting to subset a varfont after it had been partially instanced
+  with use_NO_VARIATION_INDEX=True.
+
+4.34.0 (released 2022-07-06)
+----------------------------
+
+- [instancer] Set RIBBI bits in head and OS/2 table when cutting instances and the
+  subfamily nameID=2 contains strings like 'Italic' or 'Bold' (#2673).
+- [otTraverse] Added module containing methods for traversing trees of otData tables
+  (#2660).
+- [otTables] Made DeltaSetIndexMap TTX dump less verbose by omitting no-op entries
+  (#2660).
+- [colorLib.builder] Added option to disable PaintColrLayers's reuse of layers from
+  LayerList (#2660).
+- [varLib] Added support for merging multiple master COLRv1 tables into a variable
+  COLR table (#2660, #2328). Base color glyphs of same name in different masters must have
+  identical paint graph structure (incl. number of layers, palette indices, number
+  of color line stops, corresponding paint formats at each level of the graph),
+  but can differ in the variable fields (e.g. PaintSolid.Alpha). PaintVar* tables
+  are produced when this happens and a VarStore/DeltaSetIndexMap is added to the
+  variable COLR table. It is possible for non-default masters to be 'sparse', i.e.
+  omit some of the color glyphs present in the default master.
+- [feaLib] Let the Parser set nameIDs 1 through 6 that were previously reserved (#2675).
+- [varLib.varStore] Support NO_VARIATION_INDEX in optimizer and instancer.
+- [feaLib] Show all missing glyphs at once at end of parsing (#2665).
+- [varLib.iup] Rewrite force-set conditions and limit DP loopback length (#2651).
+  For Noto Sans, IUP time drops from 23s down to 9s, with only a slight size increase
+  in the final font. This basically turns the algorithm from O(n^3) into O(n).
+- [featureVars] Report about missing glyphs in substitution rules (#2654).
+- [mutator/instancer] Added CLI flag to --no-recalc-timestamp (#2649).
+- [SVG] Allow individual SVG documents in SVG OT table to be compressed or uncompressed,
+  and remember that when roundtripping to/from ttx. The SVG.docList is now a list
+  of SVGDocument namedtuple-like dataclass containing an extra ``compressed`` field,
+  and no longer a bare 3-tuple (#2645).
+- [designspaceLib] Check for descriptor types with hasattr() to allow custom classes
+  that don't inherit the default descriptors (#2634).
+- [subset] Enable sharing across subtables of extension lookups for harfbuzz packing
+  (#2626). Updated how table packing falls back to fontTools from harfbuzz (#2668).
+- [subset] Updated default feature tags following current Harfbuzz (#2637).
+- [svgLib] Fixed regex for real number to support e.g. 1e-4 in addition to 1.0e-4.
+  Support parsing negative rx, ry on arc commands (#2596, #2611).
+- [subset] Fixed subsetting SinglePosFormat2 when ValueFormat=0 (#2603).
+
+4.33.3 (released 2022-04-26)
+----------------------------
+
+- [designspaceLib] Fixed typo in ``deepcopyExceptFonts`` method, preventing font
+  references to be transferred (#2600). Fixed another typo in the name of ``Range``
+  dataclass's ``__post_init__`` magic method (#2597).
+
+4.33.2 (released 2022-04-22)
+----------------------------
+
+- [otBase] Make logging less verbose when harfbuzz fails to serialize. Do not exit
+  at the first failure but continue attempting to fix offset overflow error using
+  the pure-python serializer even when the ``USE_HARFBUZZ_REPACKER`` option was
+  explicitly set to ``True``. This is normal with fonts with relatively large
+  tables, at least until hb.repack implements proper table splitting.
+
+4.33.1 (released 2022-04-22)
+----------------------------
+
+- [otlLib] Put back the ``FONTTOOLS_GPOS_COMPACT_MODE`` environment variable to fix
+  regression in ufo2ft (and thus fontmake) introduced with v4.33.0 (#2592, #2593).
+  This is deprecated and will be removed once ufo2ft gets updated to use the new
+  config setup.
+
+4.33.0 (released 2022-04-21)
+----------------------------
+
+- [OS/2 / merge] Automatically recalculate ``OS/2.xAvgCharWidth`` after merging
+  fonts with ``fontTools.merge`` (#2591, #2538).
+- [misc/config] Added ``fontTools.misc.configTools`` module, a generic configuration
+  system (#2416, #2439).
+  Added ``fontTools.config`` module, a fontTools-specific configuration
+  system using ``configTools`` above.
+  Attached a ``Config`` object to ``TTFont``.
+- [otlLib] Replaced environment variable for GPOS compression level with an
+  equivalent option using the new config system.
+- [designspaceLib] Incremented format version to 5.0 (#2436).
+  Added discrete axes, variable fonts, STAT information, either design- or
+  user-space location on instances.
+  Added ``fontTools.designspaceLib.split`` module to split a designspace
+  into sub-spaces that interpolate and that represent the variable fonts
+  listed in the document.
+  Made instance names optional and allow computing them from STAT data instead.
+  Added ``fontTools.designspaceLib.statNames`` module.
+  Allow instances to have the same location as a previously defined STAT label.
+  Deprecated some attributes:
+  ``SourceDescriptor``: ``copyLib``, ``copyInfo``, ``copyGroups``, ``copyFeatures``.
+  ``InstanceDescriptor``: ``kerning``, ``info``; ``glyphs``: use rules or sparse
+  sources.
+  For both, ``location``: use the more explicit designLocation.
+  Note: all are soft deprecations and existing code should keep working.
+  Updated documentation for Python methods and the XML format.
+- [varLib] Added ``build_many`` to build several variable fonts from a single
+  designspace document (#2436).
+  Added ``fontTools.varLib.stat`` module to build STAT tables from a designspace
+  document.
+- [otBase] Try to use the Harfbuzz Repacker for packing GSUB/GPOS tables when
+  ``uharfbuzz`` python bindings are available (#2552). Disable it by setting the
+  "fontTools.ttLib.tables.otBase:USE_HARFBUZZ_REPACKER" config option to ``False``.
+  If the option is set explicitly to ``True`` but ``uharfbuzz`` can't be imported
+  or fails to serialize for any reasons, an error will be raised (ImportError or
+  uharfbuzz errors).
+- [CFF/T2] Ensure that ``pen.closePath()`` gets called for CFF2 charstrings (#2577).
+  Handle implicit CFF2 closePath within ``T2OutlineExtractor`` (#2580).
+
+4.32.0 (released 2022-04-08)
+----------------------------
+
+- [otlLib] Disable GPOS7 optimization to work around bug in Apple CoreText.
+  Always force Chaining GPOS8 for now (#2540).
+- [glifLib] Added ``outputImpliedClosingLine=False`` parameter to ``Glyph.draw()``,
+  to control behaviour of ``PointToSegmentPen`` (6b4e2e7).
+- [varLib.interpolatable] Check for wrong contour starting point (#2571).
+- [cffLib] Remove leftover ``GlobalState`` class and fix calls to ``TopDictIndex()``
+  (#2569, #2570).
+- [instancer] Clear ``AxisValueArray`` if it is empty after instantiating (#2563).
+
+4.31.2 (released 2022-03-22)
+----------------------------
+
+- [varLib] fix instantiation of GPOS SinglePos values (#2555).
+
+4.31.1 (released 2022-03-18)
+----------------------------
+
+- [subset] fix subsetting OT-SVG when glyph id attribute is on the root ``<svg>``
+  element (#2553).
+
+4.31.0 (released 2022-03-18)
+----------------------------
+
+- [ttCollection] Fixed 'ResourceWarning: unclosed file' warning (#2549).
+- [varLib.merger] Handle merging SinglePos with valueformat=0 (#2550).
+- [ttFont] Update glyf's glyphOrder when calling TTFont.setGlyphOrder() (#2544).
+- [ttFont] Added ``ensureDecompiled`` method to load all tables irrespective
+  of the ``lazy`` attribute (#2551).
+- [otBase] Added ``iterSubTable`` method to iterate over BaseTable's children of
+  type BaseTable; useful for traversing a tree of otTables (#2551).
+
+4.30.0 (released 2022-03-10)
+----------------------------
+
+- [varLib] Added debug logger showing the glyph name for which ``gvar`` is built (#2542).
+- [varLib.errors] Fixed undefined names in ``FoundANone`` and ``UnsupportedFormat``
+  exceptions (ac4d5611).
+- [otlLib.builder] Added ``windowsNames`` and ``macNames`` (bool) parameters to the
+  ``buildStatTable`` function, so that one can select whether to only add one or both
+  of the two sets (#2528).
+- [t1Lib] Added the ability to recreate PostScript stream (#2504).
+- [name] Added ``getFirstDebugName``, ``getBest{Family,SubFamily,Full}Name`` methods (#2526).
+
+4.29.1 (released 2022-02-01)
+----------------------------
+
+- [colorLib] Fixed rounding issue with radial gradient's start/end circles inside
+  one another (#2521).
+- [freetypePen] Handle rotate/skew transform when auto-computing width/height of the
+  buffer; raise PenError when missing moveTo (#2517)
+
+4.29.0 (released 2022-01-24)
+----------------------------
+
+- [ufoLib] Fixed illegal characters and expanded reserved filenames (#2506).
+- [COLRv1] Don't emit useless PaintColrLayers of length=1 in LayerListBuilder (#2513).
+- [ttx] Removed legacy ``waitForKeyPress`` method on Windows (#2509).
+- [pens] Added FreeTypePen that uses ``freetype-py`` and the pen protocol for
+  rasterizing outline paths (#2494).
+- [unicodedata] Updated the script direction list to Unicode 14.0 (#2484).
+  Bumped unicodedata2 dependency to 14.0 (#2499).
+- [psLib] Fixed type of ``fontName`` in ``suckfont`` (#2496).
+
+4.28.5 (released 2021-12-19)
+----------------------------
+
+- [svgPathPen] Continuation of #2471: make sure all occurrences of ``str()`` are now
+  replaced with user-defined ``ntos`` callable.
+- [merge] Refactored code into submodules, plus several bugfixes and improvements: + fixed duplicate-glyph-resolution GSUB-lookup generation code; use tolerance in glyph + comparison for empty glyph's width; ignore space of default ignorable glyphs; + downgrade duplicates-resolution missing-GSUB from assert to warn; added --drop-tables + option (#2473, #2475, #2476). + +4.28.4 (released 2021-12-15) +---------------------------- + +- [merge] Merge GDEF marksets in Lookups properly (#2474). +- [feaLib] Have ``fontTools feaLib`` script exit with error code when build fails (#2459) +- [svgPathPen] Added ``ntos`` option to customize number formatting (e.g. rounding) (#2471). +- [subset] Speed up subsetting of large CFF fonts (#2467). +- [otTables] Speculatively promote lookups to extension to speed up compilation. If the + offset to lookup N is too big to fit in a ushort, the offset to lookup N+1 is going to + be too big as well, so we promote to extension all lookups from lookup N onwards (#2465). + +4.28.3 (released 2021-12-03) +---------------------------- + +- [subset] Fixed bug while subsetting ``COLR`` table, whereby incomplete layer records + pointing to missing glyphs were being retained leading to ``struct.error`` upon + compiling. Make it so that ``glyf`` glyph closure, which follows the ``COLR`` glyph + closure, does not influence the ``COLR`` table subsetting (#2461, #2462). +- [docs] Fully document the ``cmap`` and ``glyf`` tables (#2454, #2457). +- [colorLib.unbuilder] Fixed CLI by deleting no longer existing parameter (180bb1867). + +4.28.2 (released 2021-11-22) +---------------------------- + +- [otlLib] Remove duplicates when building coverage (#2433). +- [docs] Add interrogate configuration (#2443). +- [docs] Remove comment about missing “start” optional argument to ``calcChecksum`` (#2448). +- [cu2qu/cli] Adapt to the latest ufoLib2. +- [subset] Support subsetting SVG table and remove it from the list of drop by default tables (#534). 
+- [subset] add ``--pretty-svg`` option to pretty print SVG table contents (#2452). +- [merge] Support merging ``CFF`` tables (CID-keyed ``CFF`` is still not supported) (#2447). +- [merge] Support ``--output-file`` (#2447). +- [docs] Split table docs into individual pages (#2444). +- [feaLib] Forbid empty classes (#2446). +- [docs] Improve documentation for ``fontTools.ttLib.ttFont`` (#2442). + +4.28.1 (released 2021-11-08) +---------------------------- + +- [subset] Fixed AttributeError while traversing a color glyph's Paint graph when there is no + LayerList, which is optional (#2441). + +4.28.0 (released 2021-11-05) +---------------------------- + +- Dropped support for EOL Python 3.6, require Python 3.7 (#2417). +- [ufoLib/glifLib] Make filename-clash checks faster by using a set instead of a list (#2422). +- [subset] Don't crash if optional ClipList and LayerList are ``None`` (empty) (#2424, 2439). +- [OT-SVG] Removed support for old deprecated version 1 and embedded color palettes, + which were never officially part of the OpenType SVG spec. Upon compile, reuse offsets + to SVG documents that are identical (#2430). +- [feaLib] Added support for Variable Feature File syntax. This is experimental and subject + to change until it is finalized in the Adobe FEA spec (#2432). +- [unicodedata] Update Scripts/ScriptExtensions/Blocks to UnicodeData 14.0 (#2437). + +4.27.1 (released 2021-09-23) +---------------------------- + +- [otlLib] Fixed error when chained contextual lookup builder overflows (#2404, #2411). +- [bezierTools] Fixed two floating-point bugs: one when computing `t` for a point + lying on an almost horizontal/vertical line; another when computing the intersection + point between a curve and a line (#2413). 
+ +4.27.0 (released 2021-09-14) +---------------------------- + +- [ttLib/otTables] Cleaned up virtual GID handling: allow virtual GIDs in ``Coverage`` + and ``ClassDef`` readers; removed unused ``allowVID`` argument from ``TTFont`` + constructor, and ``requireReal`` argument in ``TTFont.getGlyphID`` method. + Make ``TTFont.setGlyphOrder`` clear reverse glyphOrder map, and assume ``glyphOrder`` + internal attribute is never modified outside setGlyphOrder; added ``TTFont.getGlyphNameMany`` + and ``getGlyphIDMany`` (#1536, #1654, #2334, #2398). +- [py23] Dropped internal use of ``fontTools.py23`` module to fix deprecation warnings + in client code that imports from fontTools (#2234, #2399, #2400). +- [subset] Fix subsetting COLRv1 clip boxes when font is loaded lazily (#2408). + +4.26.2 (released 2021-08-09) +---------------------------- + +- [otTables] Added missing ``CompositeMode.PLUS`` operator (#2390). + +4.26.1 (released 2021-08-03) +---------------------------- + +- [transform] Added ``transformVector`` and ``transformVectors`` methods to the + ``Transform`` class. Similar to ``transformPoint`` but ignore the translation + part (#2386). + +4.26.0 (released 2021-08-03) +---------------------------- + +- [xmlWriter] Default to ``"\n"`` for ``newlinestr`` instead of platform-specific + ``os.linesep`` (#2384). +- [otData] Define COLRv1 ClipList and ClipBox (#2379). +- [removeOverlaps/instancer] Added --ignore-overlap-errors option to work around + Skia PathOps.Simplify bug (#2382, #2363, google/fonts#3365). +- NOTE: This will be the last version to support Python 3.6. FontTools will require + Python 3.7 or above from the next release (#2350) + +4.25.2 (released 2021-07-26) +---------------------------- + +- [COLRv1] Various changes to sync with the latest CORLv1 draft spec. 
In particular: + define COLR.VarIndexMap, remove/inline ColorIndex struct, add VarIndexBase to ``PaintVar*`` tables (#2372); + add reduced-precision specialized transform Paints; + define Angle as fraction of half circle encoded as F2Dot14; + use FWORD (int16) for all Paint center coordinates; + change PaintTransform to have an offset to Affine2x3; +- [ttLib] when importing XML, only set sfntVersion if the font has no reader and is empty (#2376) + +4.25.1 (released 2021-07-16) +---------------------------- + +- [ttGlyphPen] Fixed bug in ``TTGlyphPointPen``, whereby open contours (i.e. starting + with segmentType "move") would throw ``NotImplementedError``. They are now treated + as if they are closed, like with the ``TTGlyphPen`` (#2364, #2366). + +4.25.0 (released 2021-07-05) +---------------------------- + +- [tfmLib] Added new library for parsing TeX Font Metric (TFM) files (#2354). +- [TupleVariation] Make shared tuples order deterministic on python < 3.7 where + Counter (subclass of dict) doesn't remember insertion order (#2351, #2353). +- [otData] Renamed COLRv1 structs to remove 'v1' suffix and match the updated draft + spec: 'LayerV1List' -> 'LayerList', 'BaseGlyphV1List' -> 'BaseGlyphList', + 'BaseGlyphV1Record' -> 'BaseGlyphPaintRecord' (#2346). + Added 8 new ``PaintScale*`` tables: with/without centers, uniform vs non-uniform. + Added ``*AroundCenter`` variants to ``PaintRotate`` and ``PaintSkew``: the default + versions no longer have centerX/Y, but default to origin. + ``PaintRotate``, ``PaintSkew`` and ``PaintComposite`` formats were re-numbered. + NOTE: these are breaking changes; clients using the experimental COLRv1 API will + have to be updated (#2348). +- [pointPens] Allow ``GuessSmoothPointPen`` to accept a tolerance. Fixed call to + ``math.atan2`` with x/y parameters inverted. Sync the code with fontPens (#2344). +- [post] Fixed parsing ``post`` table format 2.0 when it contains extra garbage + at the end of the stringData array (#2314). 
+- [subset] drop empty features unless 'size' with FeatureParams table (#2324). +- [otlLib] Added ``otlLib.optimize`` module; added GPOS compaction algorithm. + The compaction can be run on existing fonts with ``fonttools otlLib.optimize`` + or using the snippet ``compact_gpos.py``. There's experimental support for + compacting fonts at compilation time using an environment variable, but that + might be removed later (#2326). + +4.24.4 (released 2021-05-25) +---------------------------- + +- [subset/instancer] Fixed ``AttributeError`` when instantiating a VF that + contains GPOS ValueRecords with ``Device`` tables but without the respective + non-Device values (e.g. ``XAdvDevice`` without ``XAdvance``). When not + explicitly set, the latter are assumed to be 0 (#2323). + +4.24.3 (released 2021-05-20) +---------------------------- + +- [otTables] Fixed ``AttributeError`` in methods that split LigatureSubst, + MultipleSubst and AlternateSubst subtables when an offset overflow occurs. + The ``Format`` attribute was removed in v4.22.0 (#2319). + +4.24.2 (released 2021-05-20) +---------------------------- + +- [ttGlyphPen] Fixed typing annotation of TTGlyphPen glyphSet parameter (#2315). +- Fixed two instances of DeprecationWarning: invalid escape sequence (#2311). + +4.24.1 (released 2021-05-20) +---------------------------- + +- [subset] Fixed AttributeError when SinglePos subtable has None Value (ValueFormat 0) + (#2312, #2313). + +4.24.0 (released 2021-05-17) +---------------------------- + +- [pens] Add ``ttGlyphPen.TTGlyphPointPen`` similar to ``TTGlyphPen`` (#2205). + +4.23.1 (released 2021-05-14) +---------------------------- + +- [subset] Fix ``KeyError`` after subsetting ``COLR`` table that initially contains + both v0 and v1 color glyphs when the subset only requested v1 glyphs; we were + not pruning the v0 portion of the table (#2308). +- [colorLib] Set ``LayerV1List`` attribute to ``None`` when empty, it's optional + in COLRv1 (#2308). 
+ +4.23.0 (released 2021-05-13) +---------------------------- + +- [designspaceLib] Allow to use ``\\UNC`` absolute paths on Windows (#2299, #2306). +- [varLib.merger] Fixed bug where ``VarLibMergeError`` was raised with incorrect + parameters (#2300). +- [feaLib] Allow substituting a glyph class with ``NULL`` to delete multiple glyphs + (#2303). +- [glyf] Fixed ``NameError`` exception in ``getPhantomPoints`` (#2295, #2305). +- [removeOverlaps] Retry pathops.simplify after rounding path coordinates to integers + if it fails the first time using floats, to work around a rare and hard to debug + Skia bug (#2288). +- [varLib] Added support for building, reading, writing and optimizing 32-bit + ``ItemVariationStore`` as used in COLRv1 table (#2285). +- [otBase/otConverters] Add array readers/writers for int types (#2285). +- [feaLib] Allow more than one lookahead glyph/class in contextual positioning with + "value at end" (#2293, #2294). +- [COLRv1] Default varIdx should be 0xFFFFFFFF (#2297, #2298). +- [pens] Make RecordingPointPen actually pass on identifiers; replace asserts with + explicit ``PenError`` exception (#2284). +- [mutator] Round lsb for CF2 fonts as well (#2286). + +4.22.1 (released 2021-04-26) +---------------------------- + +- [feaLib] Skip references to named lookups if the lookup block definition + is empty, similarly to makeotf. This also fixes an ``AttributeError`` while + generating ``aalt`` feature (#2276, #2277). +- [subset] Fixed bug with ``--no-hinting`` implementation for Device tables (#2272, + #2275). The previous code was always dropping Device tables if no-hinting was + requested, but some Device tables (DeltaFormat=0x8000) are also used to encode + variation indices and need to be retained. +- [otBase] Fixed bug in getting the ValueRecordSize when decompiling ``MVAR`` + table with ``lazy=True`` (#2273, #2274). 
+- [varLib/glyf/gvar] Optimized and simplified ``GlyphCoordinates`` and + ``TupleVariation`` classes, use ``bytearray`` where possible, refactored + phantom-points calculations. We measured about 30% speedup in total time + of loading master ttfs, building gvar, and saving (#2261, #2266). +- [subset] Fixed ``AssertionError`` while pruning unused CPAL palettes when + ``0xFFFF`` is present (#2257, #2259). + +4.22.0 (released 2021-04-01) +---------------------------- + +- [ttLib] Remove .Format from Coverage, ClassDef, SingleSubst, LigatureSubst, + AlternateSubst, MultipleSubst (#2238). + ATTENTION: This will change your TTX dumps! +- [misc.arrayTools] move Vector to its own submodule, and rewrite as a tuple + subclass (#2201). +- [docs] Added a terminology section for varLib (#2209). +- [varLib] Move rounding to VariationModel, to avoid error accumulation from + multiple deltas (#2214) +- [varLib] Explain merge errors in more human-friendly terms (#2223, #2226) +- [otlLib] Correct some documentation (#2225) +- [varLib/otlLib] Allow merging into VariationFont without first saving GPOS + PairPos2 (#2229) +- [subset] Improve PairPosFormat2 subsetting (#2221) +- [ttLib] TTFont.save: create file on disk as late as possible (#2253) +- [cffLib] Add missing CFF2 dict operators LanguageGroup and ExpansionFactor + (#2249) + ATTENTION: This will change your TTX dumps! + +4.21.1 (released 2021-02-26) +---------------------------- + +- [pens] Reverted breaking change that turned ``AbstractPen`` and ``AbstractPointPen`` + into abstract base classes (#2164, #2198). + +4.21.0 (released 2021-02-26) +---------------------------- + +- [feaLib] Indent anchor statements in ``asFea()`` to make them more legible and + diff-able (#2193). +- [pens] Turn ``AbstractPen`` and ``AbstractPointPen`` into abstract base classes + (#2164). +- [feaLib] Added support for parsing and building ``STAT`` table from AFDKO feature + files (#2039). 
+- [instancer] Added option to update name table of generated instance using ``STAT`` + table's axis values (#2189). +- [bezierTools] Added functions to compute bezier point-at-time, as well as line-line, + curve-line and curve-curve intersections (#2192). + +4.20.0 (released 2021-02-15) +---------------------------- + +- [COLRv1] Added ``unbuildColrV1`` to deconstruct COLRv1 otTables to raw json-able + data structure; it does the reverse of ``buildColrV1`` (#2171). +- [feaLib] Allow ``sub X by NULL`` sequence to delete a glyph (#2170). +- [arrayTools] Fixed ``Vector`` division (#2173). +- [COLRv1] Define new ``PaintSweepGradient`` (#2172). +- [otTables] Moved ``Paint.Format`` enum class outside of ``Paint`` class definition, + now named ``PaintFormat``. It was clashing with paint instance ``Format`` attribute + and thus was breaking lazy load of COLR table which relies on magic ``__getattr__`` + (#2175). +- [COLRv1] Replace hand-coded builder functions with otData-driven dynamic + implementation (#2181). +- [COLRv1] Define additional static (non-variable) Paint formats (#2181). +- [subset] Added support for subsetting COLR v1 and CPAL tables (#2174, #2177). +- [fontBuilder] Allow ``setupFvar`` to optionally take ``designspaceLib.AxisDescriptor`` + objects. Added new ``setupAvar`` method. Support localised names for axes and + named instances (#2185). + +4.19.1 (released 2021-01-28) +---------------------------- + +- [woff2] An initial off-curve point with an overlap flag now stays an off-curve + point after compression. + +4.19.0 (released 2021-01-25) +---------------------------- + +- [codecs] Handle ``errors`` parameter different from 'strict' for the custom + extended mac encodings (#2137, #2132). +- [featureVars] Raise better error message when a script is missing the required + default language system (#2154). 
+- [COLRv1] Avoid abrupt change caused by rounding ``PaintRadialGradient.c0`` when + the start circle almost touches the end circle's perimeter (#2148). +- [COLRv1] Support building unlimited lists of paints as 255-ary trees of + ``PaintColrLayers`` tables (#2153). +- [subset] Prune redundant format-12 cmap subtables when all non-BMP characters + are dropped (#2146). +- [basePen] Raise ``MissingComponentError`` instead of bare ``KeyError`` when a + referenced component is missing (#2145). + +4.18.2 (released 2020-12-16) +---------------------------- + +- [COLRv1] Implemented ``PaintTranslate`` paint format (#2129). +- [varLib.cff] Fixed unbound local variable error (#1787). +- [otlLib] Don't crash when creating OpenType class definitions if some glyphs + occur more than once (#2125). + +4.18.1 (released 2020-12-09) +---------------------------- + +- [colorLib] Speed optimization for ``LayerV1ListBuilder`` (#2119). +- [mutator] Fixed missing tab in ``interpolate_cff2_metrics`` (0957dc7a). + +4.18.0 (released 2020-12-04) +---------------------------- + +- [COLRv1] Update to latest draft: added ``PaintRotate`` and ``PaintSkew`` (#2118). +- [woff2] Support new ``brotlicffi`` bindings for PyPy (#2117). +- [glifLib] Added ``expectContentsFile`` parameter to ``GlyphSet``, for use when + reading existing UFOs, to comply with the specification stating that a + ``contents.plist`` file must exist in a glyph set (#2114). +- [subset] Allow ``LangSys`` tags in ``--layout-scripts`` option (#2112). For example: + ``--layout-scripts=arab.dflt,arab.URD,latn``; this will keep ``DefaultLangSys`` + and ``URD`` language for ``arab`` script, and all languages for ``latn`` script. +- [varLib.interpolatable] Allow UFOs to be checked; report open paths, non-existent + glyphs; add a ``--json`` option to produce a machine-readable list of + incompatibilities +- [pens] Added ``QuartzPen`` to create ``CGPath`` from glyph outlines on macOS. + Requires pyobjc (#2107). 
+- [feaLib] You can export ``FONTTOOLS_LOOKUP_DEBUGGING=1`` to enable feature file + debugging info stored in ``Debg`` table (#2106). +- [otlLib] Build more efficient format 1 and format 2 contextual lookups whenever + possible (#2101). + +4.17.1 (released 2020-11-16) +---------------------------- + +- [colorLib] Fixed regression in 4.17.0 when building COLR v0 table; when color + layers are stored in UFO lib plist, we can't distinguish tuples from lists so + we need to accept either types (e5439eb9, googlefonts/ufo2ft/issues#426). + +4.17.0 (released 2020-11-12) +---------------------------- + +- [colorLib/otData] Updated to latest draft ``COLR`` v1 spec (#2092). +- [svgLib] Fixed parsing error when arc commands' boolean flags are not separated + by space or comma (#2094). +- [varLib] Interpret empty non-default glyphs as 'missing', if the default glyph is + not empty (#2082). +- [feaLib.builder] Only stash lookup location for ``Debg`` if ``Builder.buildLookups_`` + has cooperated (#2065, #2067). +- [varLib] Fixed bug in VarStore optimizer (#2073, #2083). +- [varLib] Add designspace lib key for custom feavar feature tag (#2080). +- Add HashPointPen adapted from psautohint. With this pen, a hash value of a glyph + can be computed, which can later be used to detect glyph changes (#2005). + +4.16.1 (released 2020-10-05) +---------------------------- + +- [varLib.instancer] Fixed ``TypeError`` exception when instantiating a VF with + a GSUB table 1.1 in which ``FeatureVariations`` attribute is present but set to + ``None`` -- indicating that optional ``FeatureVariations`` is missing (#2077). +- [glifLib] Make ``x`` and ``y`` attributes of the ``point`` element required + even when validation is turned off, and raise a meaningful ``GlifLibError`` + message when that happens (#2075). 
+ +4.16.0 (released 2020-09-30) +---------------------------- + +- [removeOverlaps] Added new module and ``removeOverlaps`` function that merges + overlapping contours and components in TrueType glyphs. It requires the + `skia-pathops `__ module. + Note that removing overlaps invalidates the TrueType hinting (#2068). +- [varLib.instancer] Added ``--remove-overlaps`` command-line option. + The ``overlap`` option in ``instantiateVariableFont`` now takes an ``OverlapMode`` + enum: 0: KEEP_AND_DONT_SET_FLAGS, 1: KEEP_AND_SET_FLAGS (default), and 2: REMOVE. + The latter is equivalent to calling ``removeOverlaps`` on the generated static + instance. The option continues to accept ``bool`` value for backward compatibility. + + +4.15.0 (released 2020-09-21) +---------------------------- + +- [plistlib] Added typing annotations to plistlib module. Set up mypy static + typechecker to run automatically on CI (#2061). +- [ttLib] Implement private ``Debg`` table, a reverse-DNS namespaced JSON dict. +- [feaLib] Optionally add an entry into the ``Debg`` table with the original + lookup name (if any), feature name / script / language combination (if any), + and original source filename and line location. Annotate the ttx output for + a lookup with the information from the Debg table (#2052). +- [sfnt] Disabled checksum checking by default in ``SFNTReader`` (#2058). +- [Docs] Document ``mtiLib`` module (#2027). +- [varLib.interpolatable] Added checks for contour node count and operation type + of each node (#2054). +- [ttLib] Added API to register custom table packer/unpacker classes (#2055). + +4.14.0 (released 2020-08-19) +---------------------------- + +- [feaLib] Allow anonymous classes in LookupFlags definitions (#2037). +- [Docs] Better document DesignSpace rules processing order (#2041). +- [ttLib] Fixed 21-year old bug in ``maxp.maxComponentDepth`` calculation (#2044, + #2045). +- [varLib.models] Fixed misspelled argument name in CLI entry point (81d0042a). 
+- [subset] When subsetting GSUB v1.1, fixed TypeError by checking whether the + optional FeatureVariations table is present (e63ecc5b). +- [Snippets] Added snippet to show how to decompose glyphs in a TTF (#2030). +- [otlLib] Generate GSUB type 5 and GPOS type 7 contextual lookups where appropriate + (#2016). + +4.13.0 (released 2020-07-10) +---------------------------- + +- [feaLib/otlLib] Moved lookup subtable builders from feaLib to otlLib; refactored + some common code (#2004, #2007). +- [docs] Document otlLib module (#2009). +- [glifLib] Fixed bug with some UFO .glif filenames clashing on case-insensitive + filesystems (#2001, #2002). +- [colorLib] Updated COLRv1 implementation following changes in the draft spec: + (#2008, googlefonts/colr-gradients-spec#24). + +4.12.1 (released 2020-06-16) +---------------------------- + +- [_n_a_m_e] Fixed error in ``addMultilingualName`` with one-character names. + Only attempt to recover malformed UTF-16 data from a ``bytes`` string, + not from unicode ``str`` (#1997, #1998). + +4.12.0 (released 2020-06-09) +---------------------------- + +- [otlLib/varLib] Ensure that the ``AxisNameID`` in the ``STAT`` and ``fvar`` + tables is greater than 255 as per OpenType spec (#1985, #1986). +- [docs] Document more modules in ``fontTools.misc`` package: ``filenames``, + ``fixedTools``, ``intTools``, ``loggingTools``, ``macCreatorType``, ``macRes``, + ``plistlib`` (#1981). +- [OS/2] Don't calculate whole sets of unicode codepoints, use faster and more memory + efficient ranges and bisect lookups (#1984). +- [voltLib] Support writing back abstract syntax tree as VOLT data (#1983). +- [voltLib] Accept DO_NOT_TOUCH_CMAP keyword (#1987). +- [subset/merge] Fixed a namespace clash involving a private helper class (#1955). 
+ +4.11.0 (released 2020-05-28) +---------------------------- + +- [feaLib] Introduced ``includeDir`` parameter on Parser and IncludingLexer to + explicitly specify the directory to search when ``include()`` statements are + encountered (#1973). +- [ufoLib] Silently delete duplicate glyphs within the same kerning group when reading + groups (#1970). +- [ttLib] Set version of COLR table when decompiling COLRv1 (commit 9d8a7e2). + +4.10.2 (released 2020-05-20) +---------------------------- + +- [sfnt] Fixed ``NameError: SimpleNamespace`` while reading TTC header. The regression + was introduced with 4.10.1 after removing ``py23`` star import. + +4.10.1 (released 2020-05-19) +---------------------------- + +- [sfnt] Make ``SFNTReader`` pickleable even when TTFont is loaded with lazy=True + option and thus keeps a reference to an external file (#1962, #1967). +- [feaLib.ast] Restore backward compatibility (broken in 4.10 with #1905) for + ``ChainContextPosStatement`` and ``ChainContextSubstStatement`` classes. + Make them accept either list of lookups or list of lists of lookups (#1961). +- [docs] Document some modules in ``fontTools.misc`` package: ``arrayTools``, + ``bezierTools`` ``cliTools`` and ``eexec`` (#1956). +- [ttLib._n_a_m_e] Fixed ``findMultilingualName()`` when name record's ``string`` is + encoded as bytes sequence (#1963). + +4.10.0 (released 2020-05-15) +---------------------------- + +- [varLib] Allow feature variations to be active across the entire space (#1957). +- [ufoLib] Added support for ``formatVersionMinor`` in UFO's ``fontinfo.plist`` and for + ``formatMinor`` attribute in GLIF file as discussed in unified-font-object/ufo-spec#78. + No changes in reading or writing UFOs until an upcoming (non-0) minor update of the + UFO specification is published (#1786). +- [merge] Fixed merging fonts with different versions of ``OS/2`` table (#1865, #1952). 
+- [subset] Fixed ``AttributeError`` while subsetting ``ContextSubst`` and ``ContextPos`` + Format 3 subtable (#1879, #1944). +- [ttLib.table._m_e_t_a] if data happens to be ascii, emit comment in TTX (#1938). +- [feaLib] Support multiple lookups per glyph position (#1905). +- [psCharStrings] Use inheritance to avoid repeated code in initializer (#1932). +- [Doc] Improved documentation for the following modules: ``afmLib`` (#1933), ``agl`` + (#1934), ``cffLib`` (#1935), ``cu2qu`` (#1937), ``encodings`` (#1940), ``feaLib`` + (#1941), ``merge`` (#1949). +- [Doc] Split off developer-centric info to new page, making front page of docs more + user-focused. List all utilities and sub-modules with brief descriptions. + Make README more concise and focused (#1914). +- [otlLib] Add function to build STAT table from high-level description (#1926). +- [ttLib._n_a_m_e] Add ``findMultilingualName()`` method (#1921). +- [unicodedata] Update ``RTL_SCRIPTS`` for Unicode 13.0 (#1925). +- [gvar] Sort ``gvar`` XML output by glyph name, not glyph order (#1907, #1908). +- [Doc] Added help options to ``fonttools`` command line tool (#1913, #1920). + Ensure all fonttools CLI tools have help documentation (#1948). +- [ufoLib] Only write fontinfo.plist when there actually is content (#1911). + +4.9.0 (released 2020-04-29) +--------------------------- + +- [subset] Fixed subsetting of FeatureVariations table. The subsetter no longer drops + FeatureVariationRecords that have empty substitutions as that will keep the search + going and thus change the logic. It will only drop empty records that occur at the + end of the FeatureVariationRecords array (#1881). +- [subset] Remove FeatureVariations table and downgrade GSUB/GPOS to version 0x10000 + when FeatureVariations contain no FeatureVariationRecords after subsetting (#1903). +- [agl] Add support for legacy Adobe Glyph List of glyph names in ``fontTools.agl`` + (#1895). +- [feaLib] Ignore superfluous script statements (#1883). 
+- [feaLib] Hide traceback by default on ``fonttools feaLib`` command line. + Use ``--traceback`` option to show (#1898). +- [feaLib] Check lookup index in chaining sub/pos lookups and print better error + message (#1896, #1897). +- [feaLib] Fix building chained alt substitutions (#1902). +- [Doc] Included all fontTools modules in the sphinx-generated documentation, and + published it to ReadTheDocs for continuous documentation of the fontTools project + (#1333). Check it out at https://fonttools.readthedocs.io/. Thanks to Chris Simpkins! +- [transform] The ``Transform`` class is now subclass of ``typing.NamedTuple``. No + change in functionality (#1904). + + +4.8.1 (released 2020-04-17) +--------------------------- + +- [feaLib] Fixed ``AttributeError: 'NoneType' has no attribute 'getAlternateGlyphs'`` + when ``aalt`` feature references a chain contextual substitution lookup + (googlefonts/fontmake#648, #1878). + +4.8.0 (released 2020-04-16) +--------------------------- + +- [feaLib] If Parser is initialized without a ``glyphNames`` parameter, it cannot + distinguish between a glyph name containing an hyphen, or a range of glyph names; + instead of raising an error, it now interprets them as literal glyph names, while + also outputting a logging warning to alert user about the ambiguity (#1768, #1870). +- [feaLib] When serializing AST to string, emit spaces around hyphens that denote + ranges. Also, fixed an issue with CID ranges when round-tripping AST->string->AST + (#1872). +- [Snippets/otf2ttf] In otf2ttf.py script update LSB in hmtx to match xMin (#1873). +- [colorLib] Added experimental support for building ``COLR`` v1 tables as per + the `colr-gradients-spec `__ + draft proposal. **NOTE**: both the API and the XML dump of ``COLR`` v1 are + susceptible to change while the proposal is being discussed and formalized (#1822). 
+ +4.7.0 (released 2020-04-03) +--------------------------- + +- [cu2qu] Added ``fontTools.cu2qu`` package, imported from the original + `cu2qu `__ project. The ``cu2qu.pens`` module + was moved to ``fontTools.pens.cu2quPen``. The optional cu2qu extension module + can be compiled by installing `Cython `__ before installing + fonttools from source (i.e. git repo or sdist tarball). The wheel package that + is published on PyPI (i.e. the one ``pip`` downloads, unless ``--no-binary`` + option is used), will continue to be pure-Python for now (#1868). + +4.6.0 (released 2020-03-24) +--------------------------- + +- [varLib] Added support for building variable ``BASE`` table version 1.1 (#1858). +- [CPAL] Added ``fromRGBA`` method to ``Color`` class (#1861). + + +4.5.0 (released 2020-03-20) +--------------------------- + +- [designspaceLib] Added ``add{Axis,Source,Instance,Rule}Descriptor`` methods to + ``DesignSpaceDocument`` class, to initialize new descriptor objects using keyword + arguments, and at the same time append them to the current document (#1860). +- [unicodedata] Update to Unicode 13.0 (#1859). + +4.4.3 (released 2020-03-13) +--------------------------- + +- [varLib] Always build ``gvar`` table for TrueType-flavored Variable Fonts, + even if it contains no variation data. The table is required according to + the OpenType spec (#1855, #1857). + +4.4.2 (released 2020-03-12) +--------------------------- + +- [ttx] Annotate ``LookupFlag`` in XML dump with comment explaining what bits + are set and what they mean (#1850). +- [feaLib] Added more descriptive message to ``IncludedFeaNotFound`` error (#1842). + +4.4.1 (released 2020-02-26) +--------------------------- + +- [woff2] Skip normalizing ``glyf`` and ``loca`` tables if these are missing from + a font (e.g. in NotoColorEmoji using ``CBDT/CBLC`` tables). +- [timeTools] Use non-localized date parsing in ``timestampFromString``, to fix + error when non-English ``LC_TIME`` locale is set (#1838, #1839). 
+- [fontBuilder] Make sure the CFF table generated by fontBuilder can be used by varLib + without having to compile and decompile the table first. This was breaking in + converting the CFF table to CFF2 due to some unset attributes (#1836). + +4.4.0 (released 2020-02-18) +--------------------------- + +- [colorLib] Added ``fontTools.colorLib.builder`` module, initially with ``buildCOLR`` + and ``buildCPAL`` public functions. More color font formats will follow (#1827). +- [fontBuilder] Added ``setupCOLR`` and ``setupCPAL`` methods (#1826). +- [ttGlyphPen] Quantize ``GlyphComponent.transform`` floats to ``F2Dot14`` to fix + round-trip issue when computing bounding boxes of transformed components (#1830). +- [glyf] If a component uses reference points (``firstPt`` and ``secondPt``) for + alignment (instead of X and Y offsets), compute the effective translation offset + *after* having applied any transform (#1831). +- [glyf] When all glyphs have zero contours, compile ``glyf`` table data as a single + null byte in order to pass validation by OTS and Windows (#1829). +- [feaLib] Parsing feature code now ensures that referenced glyph names are part of + the known glyph set, unless a glyph set was not provided. +- [varLib] When filling in the default axis value for a missing location of a source or + instance, correctly map the value forward. +- [varLib] The avar table can now contain mapping output values that are greater than + OR EQUAL to the preceding value, as the avar specification allows this. +- [varLib] The errors of the module are now ordered hierarchically below VarLibError. + See #1821. + +4.3.0 (released 2020-02-03) +--------------------------- + +- [EBLC/CBLC] Fixed incorrect padding length calculation for Format 3 IndexSubTable + (#1817, #1818). +- [varLib] Fixed error when merging OTL tables and TTFonts were loaded as ``lazy=True`` + (#1808, #1809). +- [varLib] Allow to use master fonts containing ``CFF2`` table when building VF (#1816). 
+- [ttLib] Make ``recalcBBoxes`` option work also with ``CFF2`` table (#1816). +- [feaLib] Don't reset ``lookupflag`` in lookups defined inside feature blocks. + They will now inherit the current ``lookupflag`` of the feature. This is what + Adobe ``makeotf`` also does in this case (#1815). +- [feaLib] Fixed bug with mixed single/multiple substitutions. If a single substitution + involved a glyph class, we were incorrectly using only the first glyph in the class + (#1814). + +4.2.5 (released 2020-01-29) +--------------------------- + +- [feaLib] Do not fail on duplicate multiple substitutions, only warn (#1811). +- [subset] Optimize SinglePos subtables to Format 1 if all ValueRecords are the same + (#1802). + +4.2.4 (released 2020-01-09) +--------------------------- + +- [unicodedata] Update RTL_SCRIPTS for Unicode 11 and 12. + +4.2.3 (released 2020-01-07) +--------------------------- + +- [otTables] Fixed bug when splitting `MarkBasePos` subtables as offsets overflow. + The mark class values in the split subtable were not being updated, leading to + invalid mark-base attachments (#1797, googlefonts/noto-source#145). +- [feaLib] Only log a warning instead of error when features contain duplicate + substitutions (#1767). +- [glifLib] Strip XML comments when parsing with lxml (#1784, #1785). + +4.2.2 (released 2019-12-12) +--------------------------- + +- [subset] Fixed issue with subsetting FeatureVariations table when the index + of features changes as features get dropped. The feature index need to be + remapped to point to index of the remaining features (#1777, #1782). +- [fontBuilder] Added `addFeatureVariations` method to `FontBuilder` class. This + is a shorthand for calling `featureVars.addFeatureVariations` on the builder's + TTFont object (#1781). +- [glyf] Fixed the flags bug in glyph.drawPoints() like we did for glyph.draw() + (#1771, #1774). 
+
+4.2.1 (released 2019-12-06)
+---------------------------
+
+- [glyf] Use the ``flagOnCurve`` bit mask in ``glyph.draw()``, so that we ignore
+  the ``overlap`` flag that may be set when instantiating variable fonts (#1771).
+
+4.2.0 (released 2019-11-28)
+---------------------------
+
+- [pens] Added the following pens:
+
+  * ``roundingPen.RoundingPen``: filter pen that rounds coordinates and components'
+    offsets to integer;
+  * ``roundingPen.RoundingPointPen``: like the above, but using PointPen protocol.
+  * ``filterPen.FilterPointPen``: base class for filter point pens;
+  * ``transformPen.TransformPointPen``: filter point pen to apply affine transform;
+  * ``recordingPen.RecordingPointPen``: records and replays point-pen commands.
+
+- [ttGlyphPen] Always round float coordinates and component offsets to integers
+  (#1763).
+- [ufoLib] When converting kerning groups from UFO2 to UFO3, avoid confusing
+  groups with the same name as one of the glyphs (#1761, #1762,
+  unified-font-object/ufo-spec#98).
+
+4.1.0 (released 2019-11-18)
+---------------------------
+
+- [instancer] Implemented restricting axis ranges (level 3 partial instancing).
+  You can now pass ``{axis_tag: (min, max)}`` tuples as input to the
+  ``instantiateVariableFont`` function. Note that changing the default axis
+  position is not supported yet. The command-line script also accepts axis ranges
+  in the form of colon-separated float values, e.g. ``wght=400:700`` (#1753, #1537).
+- [instancer] Never drop STAT ``DesignAxis`` records, but only prune out-of-range
+  ``AxisValue`` records.
+- [otBase/otTables] Enforce that VarStore.RegionAxisCount == fvar.axisCount, even
+  when regions list is empty to appease OTS < v8.0 (#1752).
+- [designspaceLib] Defined new ``processing`` attribute for ``<rules>`` element,
+  with values "first" or "last", plus other editorial changes to DesignSpace
+  specification. Bumped format version to 4.1 (#1750).
+- [varLib] Improved error message when masters' glyph orders do not match (#1758,
+  #1759).
+- [featureVars] Allow to specify custom feature tag in ``addFeatureVariations``;
+  allow said feature to already exist, in which case we append new lookup indices
+  to existing features. Implemented ``<rules>`` attribute ``processing`` according to
+  DesignSpace specification update in #1750. Depending on this flag, we generate
+  either an 'rvrn' (always processed first) or a 'rclt' feature (follows lookup order,
+  therefore last) (#1747, #1625, #1371).
+- [ttCollection] Added support for context manager auto-closing via ``with`` statement
+  like with ``TTFont`` (#1751).
+- [unicodedata] Require unicodedata2 >= 12.1.0.
+- [py2.py3] Removed yet more PY2 vestiges (#1743).
+- [_n_a_m_e] Fixed issue when comparing NameRecords with different string types (#1742).
+- [fixedTools] Changed ``fixedToFloat`` to not do any rounding but simply return
+  ``value / (1 << precisionBits)``. Added ``floatToFixedToStr`` and
+  ``strToFixedToFloat`` functions to be used when loading from or dumping to XML.
+  Fixed values (e.g. fvar axes and instance coordinates, avar mappings, etc.) are
+  now stored as un-rounded decimal floats upon decompiling (#1740, #737).
+- [feaLib] Fixed handling of multiple ``LigatureCaret`` statements for the same glyph.
+  Only the first rule per glyph is used, additional ones are ignored (#1733).
+
+4.0.2 (released 2019-09-26)
+---------------------------
+
+- [voltLib] Added support for ``ALL`` and ``NONE`` in ``PROCESS_MARKS`` (#1732).
+- [Silf] Fixed issue in ``Silf`` table compilation and decompilation regarding str vs
+  bytes in python3 (#1728).
+- [merge] Handle duplicate glyph names better: instead of appending font index to
+  all glyph names, use similar code like we use in ``post`` and ``CFF`` tables (#1729).
+
+4.0.1 (released 2019-09-11)
+---------------------------
+
+- [otTables] Support fixing offset overflows in ``MultipleSubst`` lookup subtables
+  (#1706).
+- [subset] Prune empty strikes in ``EBDT`` and ``CBDT`` table data (#1698, #1633).
+- [pens] Fixed issue in ``PointToSegmentPen`` when last point of closed contour has
+  same coordinates as the starting point and was incorrectly dropped (#1720).
+- [Graphite] Fixed ``Sill`` table output to pass OTS (#1705).
+- [name] Added ``removeNames`` method to ``table__n_a_m_e`` class (#1719).
+- [ttLib] Added aliases for renamed entries ``ascender`` and ``descender`` in
+  ``hhea`` table (#1715).
+
+4.0.0 (released 2019-08-22)
+---------------------------
+
+- NOTE: The v4.x version series only supports Python 3.6 or greater. You can keep
+  using fonttools 3.x if you need support for Python 2.
+- [py23] Removed all the python2-only code since it is no longer reachable, thus
+  unused; only the Python3 symbols were kept, but these are no-op. The module is now
+  DEPRECATED and will be removed in the future.
+- [ttLib] Fixed UnboundLocalError for empty loca/glyph tables (#1680). Also, allow
+  the glyf table to be incomplete when dumping to XML (#1681).
+- [varLib.models] Fixed KeyError while sorting masters when there are no on-axis
+  masters for a given axis (38a8eb0e).
+- [cffLib] Make sure glyph names are unique (#1699).
+- [feaLib] Fix feature parser to correctly handle octal numbers (#1700).
+
+3.44.0 (released 2019-08-02)
+----------------------------
+
+- NOTE: This is the last scheduled release to support Python 2.7. The upcoming fonttools
+  v4.x series is going to require Python 3.6 or greater.
+- [varLib] Added new ``varLib.instancer`` module for partially instantiating variable
+  fonts.
This extends (and will eventually replace) ``varLib.mutator`` module, as
+  it allows to create not just full static instances from a variable font, but also
+  "partial" or "less variable" fonts where some of the axes are dropped or
+  instantiated at a particular value.
+  Also available from the command-line as `fonttools varLib.instancer --help`
+  (#1537, #1628).
+- [cffLib] Added support for ``FDSelect`` format 4 (#1677).
+- [subset] Added support for subsetting ``sbix`` (Apple bitmap color font) table.
+- [t1Lib] Fixed issue parsing ``eexec`` section in Type1 fonts when whitespace
+  characters are interspersed among the trailing zeros (#1676).
+- [cffLib.specializer] Fixed bug in ``programToCommands`` with CFF2 charstrings (#1669).
+
+3.43.2 (released 2019-07-10)
+----------------------------
+
+- [featureVars] Fixed region-merging code on python3 (#1659).
+- [varLib.cff] Fixed merging of sparse PrivateDict items (#1653).
+
+3.43.1 (released 2019-06-19)
+----------------------------
+
+- [subset] Fixed regression when passing ``--flavor=woff2`` option with an input font
+  that was already compressed as WOFF 1.0 (#1650).
+
+3.43.0 (released 2019-06-18)
+----------------------------
+
+- [woff2] Added support for compressing/decompressing WOFF2 fonts with non-transformed
+  ``glyf`` and ``loca`` tables, as well as with transformed ``hmtx`` table.
+  Removed ``Snippets/woff2_compress.py`` and ``Snippets/woff2_decompress.py`` scripts,
+  and replaced them with a new console entry point ``fonttools ttLib.woff2``
+  that provides two sub-commands ``compress`` and ``decompress``.
+- [varLib.cff] Fixed bug when merging CFF2 ``PrivateDicts``. The ``PrivateDict``
+  data from the first region font was incorrectly used for all subsequent fonts.
+  The bug would only affect variable CFF2 fonts with hinting (#1643, #1644).
+  Also, fixed a merging bug when VF masters have no blends or marking glyphs (#1632,
+  #1642).
+- [loggingTools] Removed unused backport of ``LastResortLogger`` class. +- [subset] Gracefully handle partial MATH table (#1635). +- [featureVars] Avoid duplicate references to ``rvrn`` feature record in + ``DefaultLangSys`` tables when calling ``addFeatureVariations`` on a font that + does not already have a ``GSUB`` table (aa8a5bc6). +- [varLib] Fixed merging of class-based kerning. Before, the process could introduce + rogue kerning values and variations for random classes against class zero (everything + not otherwise classed). +- [varLib] Fixed merging GPOS tables from master fonts with different number of + ``SinglePos`` subtables (#1621, #1641). +- [unicodedata] Updated Blocks, Scripts and ScriptExtensions to Unicode 12.1. + +3.42.0 (released 2019-05-28) +---------------------------- + +- [OS/2] Fixed sign of ``fsType``: it should be ``uint16``, not ``int16`` (#1619). +- [subset] Skip out-of-range class values in mark attachment (#1478). +- [fontBuilder] Add an empty ``DSIG`` table with ``setupDummyDSIG`` method (#1621). +- [varLib.merger] Fixed bug whereby ``GDEF.GlyphClassDef`` were being dropped + when generating instance via ``varLib.mutator`` (#1614). +- [varLib] Added command-line options ``-v`` and ``-q`` to configure logging (#1613). +- [subset] Update font extents in head table (#1612). +- [subset] Make --retain-gids truncate empty glyphs after the last non-empty glyph + (#1611). +- [requirements] Updated ``unicodedata2`` backport for Unicode 12.0. + +3.41.2 (released 2019-05-13) +---------------------------- + +- [cffLib] Fixed issue when importing a ``CFF2`` variable font from XML, whereby + the VarStore state was not propagated to PrivateDict (#1598). +- [varLib] Don't drop ``post`` glyph names when building CFF2 variable font (#1609). + + +3.41.1 (released 2019-05-13) +---------------------------- + +- [designspaceLib] Added ``loadSourceFonts`` method to load source fonts using + custom opener function (#1606). 
+- [head] Round font bounding box coordinates to integers to fix compile error
+  if CFF font has float coordinates (#1604, #1605).
+- [feaLib] Don't write ``None`` in ``ast.ValueRecord.asFea()`` (#1599).
+- [subset] Fixed issue ``AssertionError`` when using ``--desubroutinize`` option
+  (#1590, #1594).
+- [graphite] Fixed bug in ``Silf`` table's ``decompile`` method unmasked by
+  previous typo fix (#1597). Decode language code as UTF-8 in ``Sill`` table's
+  ``decompile`` method (#1600).
+
+3.41.0 (released 2019-04-29)
+----------------------------
+
+- [varLib/cffLib] Added support for building ``CFF2`` variable font from sparse
+  masters, or masters with more than one model (multiple ``VarStore.VarData``).
+  In ``cffLib.specializer``, added support for ``CFF2`` CharStrings with
+  ``blend`` operators (#1547, #1591).
+- [subset] Fixed subsetting ``HVAR`` and ``VVAR`` with ``--retain-gids`` option,
+  and when advances mapping is null while sidebearings mappings are non-null
+  (#1587, #1588).
+- Added ``otlLib.maxContextCalc`` module to compute ``OS/2.usMaxContext`` value.
+  Calculate it automatically when compiling features with feaLib. Added option
+  ``--recalc-max-context`` to ``subset`` module (#1582).
+- [otBase/otTables] Fixed ``AttributeError`` on missing OT table fields after
+  importing font from TTX (#1584).
+- [graphite] Fixed typo ``Silf`` table's ``decompile`` method (#1586).
+- [otlLib] Better compress ``GPOS`` SinglePos (LookupType 1) subtables (#1539).
+
+3.40.0 (released 2019-04-08)
+----------------------------
+
+- [subset] Fixed error while subsetting ``VVAR`` with ``--retain-gids``
+  option (#1552).
+- [designspaceLib] Use up-to-date default location in ``findDefault`` method
+  (#1554).
+- [voltLib] Allow passing file-like object to Parser.
+- [arrayTools/glyf] ``calcIntBounds`` (used to compute bounding boxes of glyf
+  table's glyphs) now uses ``otRound`` instead of ``round3`` (#1566).
+- [svgLib] Added support for converting more SVG shapes to path ``d`` strings + (ellipse, line, polyline), as well as support for ``transform`` attributes. + Only ``matrix`` transformations are currently supported (#1564, #1564). +- [varLib] Added support for building ``VVAR`` table from ``vmtx`` and ``VORG`` + tables (#1551). +- [fontBuilder] Enable making CFF2 fonts with ``post`` table format 2 (#1557). +- Fixed ``DeprecationWarning`` on invalid escape sequences (#1562). + +3.39.0 (released 2019-03-19) +---------------------------- + +- [ttLib/glyf] Raise more specific error when encountering recursive + component references (#1545, #1546). +- [Doc/designspaceLib] Defined new ``public.skipExportGlyphs`` lib key (#1534, + unified-font-object/ufo-spec#84). +- [varLib] Use ``vmtx`` to compute vertical phantom points; or ``hhea.ascent`` + and ``head.unitsPerEM`` if ``vmtx`` is missing (#1528). +- [gvar/cvar] Sort XML element's min/value/max attributes in TupleVariation + toXML to improve readability of TTX dump (#1527). +- [varLib.plot] Added support for 2D plots with only 1 variation axis (#1522). +- [designspaceLib] Use axes maps when normalizing locations in + DesignSpaceDocument (#1226, #1521), and when finding default source (#1535). +- [mutator] Set ``OVERLAP_SIMPLE`` and ``OVERLAP_COMPOUND`` glyf flags by + default in ``instantiateVariableFont``. Added ``--no-overlap`` cli option + to disable this (#1518). +- [subset] Fixed subsetting ``VVAR`` table (#1516, #1517). + Fixed subsetting an ``HVAR`` table that has an ``AdvanceWidthMap`` when the + option ``--retain-gids`` is used. +- [feaLib] Added ``forceChained`` in MultipleSubstStatement (#1511). + Fixed double indentation of ``subtable`` statement (#1512). + Added support for ``subtable`` statement in more places than just PairPos + lookups (#1520). + Handle lookupflag 0 and lookupflag without a value (#1540). +- [varLib] In ``load_designspace``, provide a default English name for the + ``ital`` axis tag. 
+- Remove pyftinspect because it is unmaintained and bitrotted. + +3.38.0 (released 2019-02-18) +---------------------------- + +- [cffLib] Fixed RecursionError when unpickling or deepcopying TTFont with + CFF table (#1488, 649dc49). +- [subset] Fixed AttributeError when using --desubroutinize option (#1490). + Also, fixed desubroutinizing bug when subrs contain hints (#1499). +- [CPAL] Make Color a subclass of namedtuple (173a0f5). +- [feaLib] Allow hyphen in glyph class names. +- [feaLib] Added 'tables' option to __main__.py (#1497). +- [feaLib] Add support for special-case contextual positioning formatting + (#1501). +- [svgLib] Support converting SVG basic shapes (rect, circle, etc.) into + equivalent SVG paths (#1500, #1508). +- [Snippets] Added name-viewer.ipynb Jupyter notebook. + + +3.37.3 (released 2019-02-05) +---------------------------- + +- The previous release accidentally changed several files from Unix to DOS + line-endings. Fix that. + +3.37.2 (released 2019-02-05) +---------------------------- + +- [varLib] Temporarily revert the fix to ``load_masters()``, which caused a + crash in ``interpolate_layout()`` when ``deepcopy``-ing OTFs. + +3.37.1 (released 2019-02-05) +---------------------------- + +- [varLib] ``load_masters()`` now actually assigns the fonts it loads to the + source.font attributes. +- [varLib] Fixed an MVAR table generation crash when sparse masters were + involved. +- [voltLib] ``parse_coverage_()`` returns a tuple instead of an ast.Enum. +- [feaLib] A MarkClassDefinition inside a block is no longer doubly indented + compared to the rest of the block. + +3.37.0 (released 2019-01-28) +---------------------------- + +- [svgLib] Added support for converting elliptical arcs to cubic bezier curves + (#1464). +- [py23] Added backport for ``math.isfinite``. +- [varLib] Apply HIDDEN flag to fvar axis if designspace axis has attribute + ``hidden=1``. +- Fixed "DeprecationWarning: invalid escape sequence" in Python 3.7. 
+- [voltLib] Fixed parsing glyph groups. Distinguish different PROCESS_MARKS.
+  Accept COMPONENT glyph type.
+- [feaLib] Distinguish missing value and explicit ``<NULL>`` for PairPos2
+  format A (#1459). Round-trip ``useExtension`` keyword. Implemented
+  ``ValueRecord.asFea`` method.
+- [subset] Insert empty widths into hdmx when retaining gids (#1458).
+
+3.36.0 (released 2019-01-17)
+----------------------------
+
+- [ttx] Added ``--no-recalc-timestamp`` option to keep the original font's
+  ``head.modified`` timestamp (#1455, #46).
+- [ttx/psCharStrings] Fixed issues while dumping and round-tripping CFF2 table
+  with ttx (#1451, #1452, #1456).
+- [voltLib] Fixed check for duplicate anchors (#1450). Don't try to read past
+  the ``END`` operator in .vtp file (#1453).
+- [varLib] Use sentinel value -0x8000 (-32768) to ignore post.underlineThickness
+  and post.underlinePosition when generating MVAR deltas (#1449,
+  googlei18n/ufo2ft#308).
+- [subset] Added ``--retain-gids`` option to subset font without modifying the
+  current glyph indices (#1443, #1447).
+- [ufoLib] Replace deprecated calls to ``getbytes`` and ``setbytes`` with new
+  equivalent ``readbytes`` and ``writebytes`` calls. ``fs`` >= 2.2 now required.
+- [varLib] Allow loading masters from TTX files as well (#1441).
+
+3.35.2 (released 2019-01-14)
+----------------------------
+
+- [hmtx/vmtx]: Allow to compile/decompile ``hmtx`` and ``vmtx`` tables even
+  without the corresponding (required) metrics header tables, ``hhea`` and
+  ``vhea`` (#1439).
+- [varLib] Added support for localized axes' ``labelname`` and named instances'
+  ``stylename`` (#1438).
+
+3.35.1 (released 2019-01-09)
+----------------------------
+
+- [_m_a_x_p] Include ``maxComponentElements`` in ``maxp`` table's recalculation.
+
+3.35.0 (released 2019-01-07)
+----------------------------
+
+- [psCharStrings] In ``encodeFloat`` function, use float's "general format" with
+  8 digits of precision (i.e. ``%8g``) instead of ``str()``.
This works around + a macOS rendering issue when real numbers in CFF table are too long, and + also makes sure that floats are encoded with the same precision in python 2.7 + and 3.x (#1430, googlei18n/ufo2ft#306). +- [_n_a_m_e/fontBuilder] Make ``_n_a_m_e_table.addMultilingualName`` also add + Macintosh (platformID=1) names by default. Added options to ``FontBuilder`` + ``setupNameTable`` method to optionally disable Macintosh or Windows names. + (#1359, #1431). +- [varLib] Make ``build`` optionally accept a ``DesignSpaceDocument`` object, + instead of a designspace file path. The caller can now set the ``font`` + attribute of designspace's sources to a TTFont object, thus allowing to + skip filenames manipulation altogether (#1416, #1425). +- [sfnt] Allow SFNTReader objects to be deep-copied. +- Require typing>=3.6.4 on py27 to fix issue with singledispatch (#1423). +- [designspaceLib/t1Lib/macRes] Fixed some cases where pathlib.Path objects were + not accepted (#1421). +- [varLib] Fixed merging of multiple PairPosFormat2 subtables (#1411). +- [varLib] The default STAT table version is now set to 1.1, to improve + compatibility with legacy applications (#1413). + +3.34.2 (released 2018-12-17) +---------------------------- + +- [merge] Fixed AssertionError when none of the script tables in GPOS/GSUB have + a DefaultLangSys record (#1408, 135a4a1). + +3.34.1 (released 2018-12-17) +---------------------------- + +- [varLib] Work around macOS rendering issue for composites without gvar entry (#1381). + +3.34.0 (released 2018-12-14) +---------------------------- + +- [varLib] Support generation of CFF2 variable fonts. ``model.reorderMasters()`` + now supports arbitrary mapping. Fix handling of overlapping ranges for feature + variations (#1400). +- [cffLib, subset] Code clean-up and fixing related to CFF2 support. +- [ttLib.tables.ttProgram] Use raw strings for regex patterns (#1389). +- [fontbuilder] Initial support for building CFF2 fonts. 
Set CFF's
+  ``FontMatrix`` automatically from unitsPerEm.
+- [plistLib] Accept the more general ``collections.Mapping`` instead of the
+  specific ``dict`` class to support custom data classes that should serialize
+  to dictionaries.
+
+3.33.0 (released 2018-11-30)
+----------------------------
+- [subset] subsetter bug fix with variable fonts.
+- [varLib.featureVar] Improve FeatureVariations generation with many rules.
+- [varLib] Enable sparse masters when building variable fonts:
+  https://github.com/fonttools/fonttools/pull/1368#issuecomment-437257368
+- [varLib.mutator] Add IDEF for GETVARIATION opcode, for handling hints in an
+  instance.
+- [ttLib] Ignore the length of kern table subtable format 0
+
+3.32.0 (released 2018-11-01)
+----------------------------
+
+- [ufoLib] Make ``UFOWriter`` a subclass of ``UFOReader``, and use mixins
+  for shared methods (#1344).
+- [featureVars] Fixed normalization error when a condition's minimum/maximum
+  attributes are missing in designspace ``<condition>`` (#1366).
+- [setup.py] Added ``[plot]`` to extras, to optionally install ``matplotlib``,
+  needed to use the ``fontTools.varLib.plot`` module.
+- [varLib] Take total bounding box into account when resolving model (7ee81c8).
+  If multiple axes have the same range ratio, cut across both (62003f4).
+- [subset] Don't error if ``STAT`` has no ``AxisValue`` tables.
+- [fontBuilder] Added a new submodule which contains a ``FontBuilder`` wrapper
+  class around ``TTFont`` that makes it easier to create a working TTF or OTF
+  font from scratch with code. NOTE: the API is still experimental and may
+  change in future versions.
+
+3.31.0 (released 2018-10-21)
+----------------------------
+
+- [ufoLib] Merged the `ufoLib <https://github.com/unified-font-object/ufoLib>`__
+  master branch into a new ``fontTools.ufoLib`` package (#1335, #1095).
+  Moved ``ufoLib.pointPen`` module to ``fontTools.pens.pointPen``.
+  Moved ``ufoLib.etree`` module to ``fontTools.misc.etree``.
+  Moved ``ufoLib.plistlib`` module to ``fontTools.misc.plistlib``.
+ To use the new ``fontTools.ufoLib`` module you need to install fonttools + with the ``[ufo]`` extra, or you can manually install the required additional + dependencies (cf. README.rst). +- [morx] Support AAT action type to insert glyphs and clean up compilation + of AAT action tables (4a1871f, 2011ccf). +- [subset] The ``--no-hinting`` on a CFF font now also drops the optional + hinting keys in Private dict: ``ForceBold``, ``LanguageGroup``, and + ``ExpansionFactor`` (#1322). +- [subset] Include nameIDs referenced by STAT table (#1327). +- [loggingTools] Added ``msg=None`` argument to + ``CapturingLogHandler.assertRegex`` (0245f2c). +- [varLib.mutator] Implemented ``FeatureVariations`` instantiation (#1244). +- [g_l_y_f] Added PointPen support to ``_TTGlyph`` objects (#1334). + +3.30.0 (released 2018-09-18) +---------------------------- + +- [feaLib] Skip building noop class PairPos subtables when Coverage is NULL + (#1318). +- [ttx] Expose the previously reserved bit flag ``OVERLAP_SIMPLE`` of + glyf table's contour points in the TTX dump. This is used in some + implementations to specify a non-zero fill with overlapping contours (#1316). +- [ttLib] Added support for decompiling/compiling ``TS1C`` tables containing + VTT sources for ``cvar`` variation table (#1310). +- [varLib] Use ``fontTools.designspaceLib`` to read DesignSpaceDocument. The + ``fontTools.varLib.designspace`` module is now deprecated and will be removed + in future versions. The presence of an explicit ``axes`` element is now + required in order to build a variable font (#1224, #1313). +- [varLib] Implemented building GSUB FeatureVariations table from the ``rules`` + element of DesignSpace document (#1240, #713, #1314). +- [subset] Added ``--no-layout-closure`` option to not expand the subset with + the glyphs produced by OpenType layout features. Instead, OpenType features + will be subset to only rules that are relevant to the otherwise-specified + glyph set (#43, #1121). 
+
+3.29.1 (released 2018-09-10)
+----------------------------
+
+- [feaLib] Fixed issue whereby lookups from DFLT/dflt were not included in the
+  DFLT/non-dflt language systems (#1307).
+- [graphite] Fixed issue on big-endian architectures (e.g. ppc64) (#1311).
+- [subset] Added ``--layout-scripts`` option to add/exclude set of OpenType
+  layout scripts that will be preserved. By default all scripts are retained
+  (``'*'``) (#1303).
+
+3.29.0 (released 2018-07-26)
+----------------------------
+
+- [feaLib] In the OTL table builder, when the ``name`` table is excluded
+  from the list of tables to be built, skip compiling ``featureNames`` blocks,
+  as the records referenced in ``FeatureParams`` table don't exist (68951b7).
+- [otBase] Try ``ExtensionLookup`` if other offset-overflow methods fail
+  (05f95f0).
+- [feaLib] Added support for explicit ``subtable;`` break statements in
+  PairPos lookups; previously these were ignored (#1279, #1300, #1302).
+- [cffLib.specializer] Make sure the stack depth does not exceed maxstack - 1,
+  so that a subroutinizer can insert subroutine calls (#1301,
+  https://github.com/googlei18n/ufo2ft/issues/266).
+- [otTables] Added support for fixing offset overflow errors occurring inside
+  ``MarkBasePos`` subtables (#1297).
+- [subset] Write the default output file extension based on ``--flavor`` option,
+  or the value of ``TTFont.sfntVersion`` (d7ac0ad).
+- [unicodedata] Updated Blocks, Scripts and ScriptExtensions for Unicode 11
+  (452c85e).
+- [xmlWriter] Added context manager to XMLWriter class to autoclose file
+  descriptor on exit (#1290).
+- [psCharStrings] Optimize the charstring's bytecode by encoding as integers
+  all float values that have no decimal portion (8d7774a).
+- [ttFont] Fixed missing import of ``TTLibError`` exception (#1285).
+- [feaLib] Allow any languages other than ``dflt`` under ``DFLT`` script
+  (#1278, #1292).
+
+3.28.0 (released 2018-06-19)
+----------------------------
+
+- [featureVars] Added experimental module to build ``FeatureVariations``
+  tables. Still needs to be hooked up to ``varLib.build`` (#1240).
+- [fixedTools] Added ``otRound`` to round floats to nearest integer towards
+  positive Infinity. This is now used where we deal with visual data like X/Y
+  coordinates, advance widths/heights, variation deltas, and similar (#1274,
+  #1248).
+- [subset] Improved GSUB closure memoize algorithm.
+- [varLib.models] Fixed regression in model resolution (180124, #1269).
+- [feaLib.ast] Fixed error when converting ``SubtableStatement`` to string
+  (#1275).
+- [varLib.mutator] Set ``OS/2.usWeightClass`` and ``usWidthClass``, and
+  ``post.italicAngle`` based on the 'wght', 'wdth' and 'slnt' axis values
+  (#1276, #1264).
+- [py23/loggingTools] Don't automatically set ``logging.lastResort`` handler
+  on py27. Moved ``LastResortLogger`` to the ``loggingTools`` module (#1277).
+
+3.27.1 (released 2018-06-11)
+----------------------------
+
+- [ttGlyphPen] Issue a warning and skip building non-existing components
+  (https://github.com/googlei18n/fontmake/issues/411).
+- [tests] Fixed issue running ttx_test.py from a tagged commit.
+
+3.27.0 (released 2018-06-11)
+----------------------------
+
+- [designspaceLib] Added new ``conditionSet`` element to ``rule`` element in
+  designspace document. Bumped ``format`` attribute to ``4.0`` (previously,
+  it was formatted as an integer). Removed ``checkDefault``, ``checkAxes``
+  methods, and any kind of guessing about the axes when the ``<axes>`` element
+  is missing. The default master is expected at the intersection of all default
+  values for each axis (#1254, #1255, #1267).
+- [cffLib] Fixed issues when compiling CFF2 or converting from CFF when the
+  font has an FDArray (#1211, #1271).
+- [varLib] Avoid attempting to build ``cvar`` table when ``glyf`` table is not
+  present, as is the case for CFF2 fonts.
+- [subset] Handle None coverages in MarkGlyphSets; revert commit 02616ab that
+  sets empty Coverage tables in MarkGlyphSets to None, to make OTS happy.
+- [ttFont] Allow to build glyph order from ``maxp.numGlyphs`` when ``post`` or
+  ``cmap`` are missing.
+- [ttFont] Added ``__len__`` method to ``_TTGlyphSet``.
+- [glyf] Ensure ``GlyphCoordinates`` never overflow signed shorts (#1230).
+- [py23] Added alias for ``itertools.izip`` shadowing the built-in ``zip``.
+- [loggingTools] Memoize ``log`` property of ``LogMixin`` class (fbab12).
+- [ttx] Improved test coverage (#1261).
+- [Snippets] Added script to append a suffix to all family names in a font.
+- [varLib.plot] Make it work with matplotlib >= 2.1 (b38e2b).
+
+3.26.0 (released 2018-05-03)
+----------------------------
+
+- [designspace] Added a new optional ``layer`` attribute to the source element,
+  and a corresponding ``layerName`` attribute to the ``SourceDescriptor``
+  object (#1253).
+  Added ``conditionset`` element to the ``rule`` element to the spec, but not
+  implemented in designspace reader/writer yet (#1254).
+- [varLib.models] Refine modeling one last time (0ecf5c5).
+- [otBase] Fixed sharing of tables referred to by different offset sizes
+  (795f2f9).
+- [subset] Don't drop a GDEF that only has VarStore (fc819d6). Set to None
+  empty Coverage tables in MarkGlyphSets (02616ab).
+- [varLib]: Added ``--master-finder`` command-line option (#1249).
+- [varLib.mutator] Prune fvar nameIDs from instance's name table (#1245).
+- [otTables] Allow decompiling bad ClassDef tables with invalid format, with
+  warning (#1236).
+- [varLib] Make STAT v1.2 and reuse nameIDs from fvar table (#1242).
+- [varLib.plot] Show master locations. Set axis limits to -1, +1.
+- [subset] Handle HVAR direct mapping. Passthrough 'cvar'.
+  Added ``--font-number`` command-line option for collections.
+- [t1Lib] Allow a text encoding to be specified when parsing a Type 1 font
+  (#1234).
Added ``kind`` argument to T1Font constructor (c5c161c). +- [ttLib] Added context manager API to ``TTFont`` class, so it can be used in + ``with`` statements to auto-close the file when exiting the context (#1232). + +3.25.0 (released 2018-04-03) +---------------------------- + +- [varLib] Improved support-resolution algorithm. Previously, the on-axis + masters would always cut the space. They don't anymore. That's more + consistent, and fixes the main issue Erik showed at TYPO Labs 2017. + Any varfont built that had an unusual master configuration will change + when rebuilt (42bef17, a523a697, + https://github.com/googlei18n/fontmake/issues/264). +- [varLib.models] Added a ``main()`` entry point, that takes positions and + prints model results. +- [varLib.plot] Added new module to plot a designspace's + VariationModel. Requires ``matplotlib``. +- [varLib.mutator] Added -o option to specify output file path (2ef60fa). +- [otTables] Fixed IndexError while pruning of HVAR pre-write (6b6c34a). +- [varLib.models] Convert delta array to floats if values overflows signed + short integer (0055f94). + +3.24.2 (released 2018-03-26) +---------------------------- + +- [otBase] Don't fail during ``ValueRecord`` copy if src has more items. + We drop hinting in the subsetter by simply changing ValueFormat, without + cleaning up the actual ValueRecords. This was causing assertion error if + a variable font was subsetted without hinting and then passed directly to + the mutator for instantiation without first it saving to disk. + +3.24.1 (released 2018-03-06) +---------------------------- + +- [varLib] Don't remap the same ``DeviceTable`` twice in VarStore optimizer + (#1206). +- [varLib] Add ``--disable-iup`` option to ``fonttools varLib`` script, + and a ``optimize=True`` keyword argument to ``varLib.build`` function, + to optionally disable IUP optimization while building varfonts. +- [ttCollection] Fixed issue while decompiling ttc with python3 (#1207). 
+
+3.24.0 (released 2018-03-01)
+----------------------------
+
+- [ttGlyphPen] Decompose composite glyphs if any components' transform is too
+  large to fit a ``F2Dot14`` value, or clamp transform values that are
+  (almost) equal to +2.0 to make them fit and avoid decomposing (#1200,
+  #1204, #1205).
+- [ttx] Added new ``-g`` option to dump glyphs from the ``glyf`` table
+  split as individual ttx files (#153, #1035, #1132, #1202).
+- Copied ``ufoLib.filenames`` module to ``fontTools.misc.filenames``, used
+  for the ttx split-glyphs option (#1202).
+- [feaLib] Added support for ``cvParameters`` blocks in Character Variant
+  features ``cv01-cv99`` (#860, #1169).
+- [Snippets] Added ``checksum.py`` script to generate/check SHA1 hash of
+  ttx files (#1197).
+- [varLib.mutator] Fixed issue while instantiating some variable fonts
+  whereby the horizontal advance width computed from ``gvar`` phantom points
+  could turn up to be negative (#1198).
+- [varLib/subset] Fixed issue with subsetting GPOS variation data not
+  picking up ``ValueRecord`` ``Device`` objects (54fd71f).
+- [feaLib/voltLib] In all AST elements, the ``location`` is no longer a
+  required positional argument, but an optional keyword argument (defaults
+  to ``None``). This will make it easier to construct feature AST from
+  code (#1201).
+
+
+3.23.0 (released 2018-02-26)
+----------------------------
+
+- [designspaceLib] Added an optional ``lib`` element to the designspace as a
+  whole, as well as to the instance elements, to store arbitrary data in a
+  property list dictionary, similar to the UFO's ``lib``. Added an optional
+  ``font`` attribute to the ``SourceDescriptor``, to allow operating on
+  in-memory font objects (#1175).
+- [cffLib] Fixed issue with lazy-loading of attributes when attempting to
+  set the CFF TopDict.Encoding (#1177, #1187).
+- [ttx] Fixed regression introduced in 3.22.0 that affected the split tables
+  ``-s`` option (#1188).
+- [feaLib] Added ``IncludedFeaNotFound`` custom exception subclass, raised + when an included feature file cannot be found (#1186). +- [otTables] Changed ``VarIdxMap`` to use glyph names internally instead of + glyph indexes. The old ttx dumps of HVAR/VVAR tables that contain indexes + can still be imported (21cbab8, 38a0ffb). +- [varLib] Implemented VarStore optimizer (#1184). +- [subset] Implemented pruning of GDEF VarStore, HVAR and MVAR (#1179). +- [sfnt] Restore backward compatibility with ``numFonts`` attribute of + ``SFNTReader`` object (#1181). +- [merge] Initial support for merging ``LangSysRecords`` (#1180). +- [ttCollection] don't seek(0) when writing to possibly unseekable streams. +- [subset] Keep all ``--name-IDs`` from 0 to 6 by default (#1170, #605, #114). +- [cffLib] Added ``width`` module to calculate optimal CFF default and + nominal glyph widths. +- [varLib] Don’t fail if STAT already in the master fonts (#1166). + +3.22.0 (released 2018-02-04) +---------------------------- + +- [subset] Support subsetting ``endchar`` acting as ``seac``-like components + in ``CFF`` (fixes #1162). +- [feaLib] Allow to build from pre-parsed ``ast.FeatureFile`` object. + Added ``tables`` argument to only build some tables instead of all (#1159, + #1163). +- [textTools] Replaced ``safeEval`` with ``ast.literal_eval`` (#1139). +- [feaLib] Added option to the parser to not resolve ``include`` statements + (#1154). +- [ttLib] Added new ``ttCollection`` module to read/write TrueType and + OpenType Collections. Exports a ``TTCollection`` class with a ``fonts`` + attribute containing a list of ``TTFont`` instances, the methods ``save`` + and ``saveXML``, plus some list-like methods. The ``importXML`` method is + not implemented yet (#17). +- [unicodedata] Added ``ot_tag_to_script`` function that converts from + OpenType script tag to Unicode script code. 
+- Added new ``designspaceLib`` subpackage, originally from Erik Van Blokland's + ``designSpaceDocument``: https://github.com/LettError/designSpaceDocument + NOTE: this is not yet used internally by varLib, and the API may be subject + to changes (#911, #1110, LettError/designSpaceDocument#28). +- Added new FontTools icon images (8ee7c32). +- [unicodedata] Added ``script_horizontal_direction`` function that returns + either "LTR" or "RTL" given a unicode script code. +- [otConverters] Don't write descriptive name string as XML comment if the + NameID value is 0 (== NULL) (#1151, #1152). +- [unicodedata] Add ``ot_tags_from_script`` function to get the list of + OpenType script tags associated with unicode script code (#1150). +- [feaLib] Don't error when "enumerated" kern pairs conflict with preceding + single pairs; emit warning and choose the first value (#1147, #1148). +- [loggingTools] In ``CapturingLogHandler.assertRegex`` method, match the + fully formatted log message. +- [sbix] Fixed TypeError when concatenating str and bytes (#1154). +- [bezierTools] Implemented cusp support and removed ``approximate_fallback`` + arg in ``calcQuadraticArcLength``. Added ``calcCubicArcLength`` (#1142). + +3.21.2 (released 2018-01-08) +---------------------------- + +- [varLib] Fixed merging PairPos Format1/2 with missing subtables (#1125). + +3.21.1 (released 2018-01-03) +---------------------------- + +- [feaLib] Allow mixed single/multiple substitutions (#612) +- Added missing ``*.afm`` test assets to MANIFEST.in (#1137). +- Fixed dumping ``SVG`` tables containing color palettes (#1124). + +3.21.0 (released 2017-12-18) +---------------------------- + +- [cmap] when compiling format6 subtable, don't assume gid0 is always called + '.notdef' (1e42224). +- [ot] Allow decompiling fonts with bad Coverage format number (1aafae8). +- Change FontTools licence to MIT (#1127). +- [post] Prune extra names already in standard Mac set (df1e8c7). 
+- [subset] Delete empty SubrsIndex after subsetting (#994, #1118). +- [varLib] Don't share points in cvar by default, as it currently fails on + some browsers (#1113). +- [afmLib] Make poor old afmLib work on python3. + +3.20.1 (released 2017-11-22) +---------------------------- + +- [unicodedata] Fixed issue with ``script`` and ``script_extension`` functions + returning inconsistent short vs long names. They both return the short four- + letter script codes now. Added ``script_name`` and ``script_code`` functions + to look up the long human-readable script name from the script code, and + vice versa (#1109, #1111). + +3.20.0 (released 2017-11-21) +---------------------------- + +- [unicodedata] Added new module ``fontTools.unicodedata`` which exports the + same interface as the built-in ``unicodedata`` module, with the addition of + a few functions that are missing from the latter, such as ``script``, + ``script_extension`` and ``block``. Added a ``MetaTools/buildUCD.py`` script + to download and parse data files from the Unicode Character Database and + generate python modules containing lists of ranges and property values. +- [feaLib] Added ``__str__`` method to all ``ast`` elements (delegates to the + ``asFea`` method). +- [feaLib] ``Parser`` constructor now accepts a ``glyphNames`` iterable + instead of ``glyphMap`` dict. The latter still works but with a pending + deprecation warning (#1104). +- [bezierTools] Added arc length calculation functions originally from + ``pens.perimeterPen`` module (#1101). +- [varLib] Started generating STAT table (8af4309). Right now it just reflects + the axes, and even that with certain limitations: + * AxisOrdering is set to the order axes are defined, + * Name-table entries are not shared with fvar. +- [py23] Added backports for ``redirect_stdout`` and ``redirect_stderr`` + context managers (#1097). +- [Graphite] Fixed some round-trip bugs (#1093). 
+ +3.19.0 (released 2017-11-06) +---------------------------- + +- [varLib] Try set of used points instead of all points when testing whether to + share points between tuples (#1090). +- [CFF2] Fixed issue with reading/writing PrivateDict BlueValues to TTX file. + Read the commit message 8b02b5a and issue #1030 for more details. + NOTE: this change invalidates all the TTX files containing CFF2 tables + that were dumped with previous versions of fonttools. + CFF2 Subr items can have values on the stack after the last operator, thus + a ``CFF2Subr`` class was added to accommodate this (#1091). +- [_k_e_r_n] Fixed compilation of AAT kern version=1.0 tables (#1089, #1094) +- [ttLib] Added getBestCmap() convenience method to TTFont class and cmap table + class that returns a preferred Unicode cmap subtable given a list of options + (#1092). +- [morx] Emit more meaningful subtable flags. Implement InsertionMorphAction + +3.18.0 (released 2017-10-30) +---------------------------- + +- [feaLib] Fixed writing back nested glyph classes (#1086). +- [TupleVariation] Reactivated shared points logic, bugfixes (#1009). +- [AAT] Implemented ``morx`` ligature subtables (#1082). +- [reverseContourPen] Keep duplicate lineTo following a moveTo (#1080, + https://github.com/googlei18n/cu2qu/issues/51). +- [varLib.mutator] Support instantiation of GPOS, GDEF and MVAR (#1079). +- [sstruct] Fixed issue with ``unicode_literals`` and ``struct`` module in + old versions of python 2.7 (#993). + +3.17.0 (released 2017-10-16) +---------------------------- + +- [svgPathPen] Added an ``SVGPathPen`` that translates segment pen commands + into SVG path descriptions. Copied from Tal Leming's ``ufo2svg.svgPathPen`` + https://github.com/typesupply/ufo2svg/blob/d69f992/Lib/ufo2svg/svgPathPen.py +- [reverseContourPen] Added ``ReverseContourPen``, a filter pen that draws + contours with the winding direction reversed, while keeping the starting + point (#1071). 
+- [filterPen] Added ``ContourFilterPen`` to manipulate contours as a whole + rather than segment by segment. +- [arrayTools] Added ``Vector`` class to apply math operations on an array + of numbers, and ``pairwise`` function to loop over pairs of items in an + iterable. +- [varLib] Added support for building and interpolation of ``cvar`` table + (f874cf6, a25a401). + +3.16.0 (released 2017-10-03) +---------------------------- + +- [head] Try using ``SOURCE_DATE_EPOCH`` environment variable when setting + the ``head`` modified timestamp to ensure reproducible builds (#1063). + See https://reproducible-builds.org/specs/source-date-epoch/ +- [VTT] Decode VTT's ``TSI*`` tables text as UTF-8 (#1060). +- Added support for Graphite font tables: Feat, Glat, Gloc, Silf and Sill. + Thanks @mhosken! (#1054). +- [varLib] Default to using axis "name" attribute if "labelname" element + is missing (588f524). +- [merge] Added support for merging Script records. Remove unused features + and lookups after merge (d802580, 556508b). +- Added ``fontTools.svgLib`` package. Includes a parser for SVG Paths that + supports the Pen protocol (#1051). Also, added a snippet to convert SVG + outlines to UFO GLIF (#1053). +- [AAT] Added support for ``ankr``, ``bsln``, ``mort``, ``morx``, ``gcid``, + and ``cidg``. +- [subset] Implemented subsetting of ``prop``, ``opbd``, ``bsln``, ``lcar``. + +3.15.1 (released 2017-08-18) +---------------------------- + +- [otConverters] Implemented ``__add__`` and ``__radd__`` methods on + ``otConverters._LazyList`` that decompile a lazy list before adding + it to another list or ``_LazyList`` instance. Fixes an ``AttributeError`` + in the ``subset`` module when attempting to sum ``_LazyList`` objects + (6ef48bd2, 1aef1683). +- [AAT] Support the `opbd` table with optical bounds (a47f6588). +- [AAT] Support `prop` table with glyph properties (d05617b4). + + +3.15.0 (released 2017-08-17) +---------------------------- + +- [AAT] Added support for AAT lookups. 
The ``lcar`` table can be decompiled + and recompiled; further work needed to handle ``morx`` table (#1025). +- [subset] Keep (empty) DefaultLangSys for Script 'DFLT' (6eb807b5). +- [subset] Support GSUB/GPOS.FeatureVariations (fe01d87b). +- [varLib] In ``models.supportScalars``, ignore an axis when its peak value + is 0 (fixes #1020). +- [varLib] Add default mappings to all axes in avar to fix rendering issue + in some rasterizers (19c4b377, 04eacf13). +- [varLib] Flatten multiple tail PairPosFormat2 subtables before merging + (c55ef525). +- [ttLib] Added support for recalculating font bounding box in ``CFF`` and + ``head`` tables, and min/max values in ``hhea`` and ``vhea`` tables (#970). + +3.14.0 (released 2017-07-31) +---------------------------- + +- [varLib.merger] Remove Extensions subtables before merging (f7c20cf8). +- [varLib] Initialize the avar segment map with required default entries + (#1014). +- [varLib] Implemented optimal IUP optimization (#1019). +- [otData] Add ``AxisValueFormat4`` for STAT table v1.2 from OT v1.8.2 + (#1015). +- [name] Fixed BCP46 language tag for Mac langID=9: 'si' -> 'sl'. +- [subset] Return value from ``_DehintingT2Decompiler.op_hintmask`` + (c0d672ba). +- [cffLib] Allow to get TopDict by index as well as by name (dca96c9c). +- [cffLib] Removed global ``isCFF2`` state; use one set of classes for + both CFF and CFF2, maintaining backward compatibility with existing code (#1007). +- [cffLib] Deprecated maxstack operator, per OpenType spec update 1.8.1. +- [cffLib] Added missing default (-100) for UnderlinePosition (#983). +- [feaLib] Enable setting nameIDs greater than 255 (#1003). +- [varLib] Recalculate ValueFormat when merging SinglePos (#996). +- [varLib] Do not emit MVAR if there are no entries in the variation store + (#987). +- [ttx] For ``-x`` option, pad with space if table tag length is < 4. + +3.13.1 (released 2017-05-30) +---------------------------- + +- [feaLib.builder] Removed duplicate lookups optimization. 
The original + lookup order and semantics of the feature file are preserved (#976). + +3.13.0 (released 2017-05-24) +---------------------------- + +- [varLib.mutator] Implement IUP optimization (#969). +- [_g_l_y_f.GlyphCoordinates] Changed ``__bool__()`` semantics to match those + of other iterables (e46f949). Removed ``__abs__()`` (3db5be2). +- [varLib.interpolate_layout] Added ``mapped`` keyword argument to + ``interpolate_layout`` to allow disabling avar mapping: if False (default), + the location is mapped using the map element of the axes in designspace file; + if True, it is assumed that location is in designspace's internal space and + no mapping is performed (#950, #975). +- [varLib.interpolate_layout] Import designspace-loading logic from varLib. +- [varLib] Fixed bug with recombining PairPosClass2 subtables (81498e5, #914). +- [cffLib.specializer] When copying iterables, cast to list (462b7f86). + +3.12.1 (released 2017-05-18) +---------------------------- + +- [pens.t2CharStringPen] Fixed AttributeError when calling addComponent in + T2CharStringPen (#965). + +3.12.0 (released 2017-05-17) +---------------------------- + +- [cffLib.specializer] Added new ``specializer`` module to optimize CFF + charstrings, used by the T2CharStringPen (#948). +- [varLib.mutator] Sort glyphs by component depth before calculating composite + glyphs' bounding boxes to ensure deltas are correctly caclulated (#945). +- [_g_l_y_f] Fixed loss of precision in GlyphCoordinates by using 'd' (double) + instead of 'f' (float) as ``array.array`` typecode (#963, #964). + +3.11.0 (released 2017-05-03) +---------------------------- + +- [t2CharStringPen] Initial support for specialized Type2 path operators: + vmoveto, hmoveto, vlineto, hlineto, vvcurveto, hhcurveto, vhcurveto and + hvcurveto. This should produce more compact charstrings (#940, #403). +- [Doc] Added Sphinx sources for the documentation. Thanks @gferreira (#935). 
+- [fvar] Expose flags in XML (#932) +- [name] Add helper function for building multi-lingual names (#921) +- [varLib] Fixed kern merging when a PairPosFormat2 has ClassDef1 with glyphs + that are NOT present in the Coverage (1b5e1c4, #939). +- [varLib] Fixed non-deterministic ClassDef order with PY3 (f056c12, #927). +- [feaLib] Throw an error when the same glyph is defined in multiple mark + classes within the same lookup (3e3ff00, #453). + +3.10.0 (released 2017-04-14) +---------------------------- + +- [varLib] Added support for building ``avar`` table, using the designspace + ``<axes>`` elements. +- [varLib] Removed unused ``build(..., axisMap)`` argument. Axis map should + be specified in designspace file now. We do not accept nonstandard axes + if ``<axes>`` element is not present. +- [varLib] Removed "custom" axis from the ``standard_axis_map``. This was + added before when glyphsLib was always exporting the (unused) custom axis. +- [varLib] Added partial support for building ``MVAR`` table; does not + implement ``gasp`` table variations yet. +- [pens] Added FilterPen base class, for pens that control another pen; + factored out ``addComponent`` method from BasePen into a separate abstract + DecomposingPen class; added DecomposingRecordingPen, which records + components decomposed as regular contours. +- [TSI1] Fixed computation of the textLength of VTT private tables (#913). +- [loggingTools] Added ``LogMixin`` class providing a ``log`` property to + subclasses, which returns a ``logging.Logger`` named after the latter. +- [loggingTools] Added ``assertRegex`` method to ``CapturingLogHandler``. +- [py23] Added backport for python 3's ``types.SimpleNamespace`` class. +- [EBLC] Fixed issue with python 3 ``zip`` iterator. 
+ +3.9.2 (released 2017-04-08) +--------------------------- + +- [pens] Added pen to draw glyphs using WxPython ``GraphicsPath`` class: + https://wxpython.org/docs/api/wx.GraphicsPath-class.html +- [varLib.merger] Fixed issue with recombining multiple PairPosFormat2 + subtables (#888) +- [varLib] Do not encode gvar deltas that are all zeroes, or if all values + are smaller than tolerance. +- [ttLib] _TTGlyphSet glyphs now also have ``height`` and ``tsb`` (top + side bearing) attributes from the ``vmtx`` table, if present. +- [glyf] In ``GlyphCoordintes`` class, added ``__bool__`` / ``__nonzero__`` + methods, and ``array`` property to get raw array. +- [ttx] Support reading TTX files with BOM (#896) +- [CFF2] Fixed the reporting of the number of regions in the font. + +3.9.1 (released 2017-03-20) +--------------------------- + +- [varLib.merger] Fixed issue while recombining multiple PairPosFormat2 + subtables if they were split because of offset overflows (9798c30). +- [varLib.merger] Only merge multiple PairPosFormat1 subtables if there is + at least one of the fonts with a non-empty Format1 subtable (0f5a46b). +- [varLib.merger] Fixed IndexError with empty ClassDef1 in PairPosFormat2 + (aad0d46). +- [varLib.merger] Avoid reusing Class2Record (mutable) objects (e6125b3). +- [varLib.merger] Calculate ClassDef1 and ClassDef2's Format when merging + PairPosFormat2 (23511fd). +- [macUtils] Added missing ttLib import (b05f203). + +3.9.0 (released 2017-03-13) +--------------------------- + +- [feaLib] Added (partial) support for parsing feature file comments ``# ...`` + appearing in between statements (#879). +- [feaLib] Cleaned up syntax tree for FeatureNames. +- [ttLib] Added support for reading/writing ``CFF2`` table (thanks to + @readroberts at Adobe), and ``TTFA`` (ttfautohint) table. +- [varLib] Fixed regression introduced with 3.8.0 in the calculation of + ``NumShorts``, i.e. 
the number of deltas in ItemVariationData's delta sets + that use a 16-bit representation (b2825ff). + +3.8.0 (released 2017-03-05) +--------------------------- + +- New pens: MomentsPen, StatisticsPen, RecordingPen, and TeePen. +- [misc] Added new ``fontTools.misc.symfont`` module, for symbolic font + statistical analysis; requires ``sympy`` (http://www.sympy.org/en/index.html) +- [varLib] Added experimental ``fontTools.varLib.interpolatable`` module for + finding wrong contour order between different masters +- [varLib] designspace.load() now returns a dictionary, instead of a tuple, + and supports element (#864); the 'masters' item was renamed 'sources', + like the element in the designspace document +- [ttLib] Fixed issue with recalculating ``head`` modified timestamp when + saving CFF fonts +- [ttLib] In TupleVariation, round deltas before compiling (#861, fixed #592) +- [feaLib] Ignore duplicate glyphs in classes used as MarkFilteringSet and + MarkAttachmentType (#863) +- [merge] Changed the ``gasp`` table merge logic so that only the one from + the first font is retained, similar to other hinting tables (#862) +- [Tests] Added tests for the ``varLib`` package, as well as test fonts + from the "Annotated OpenType Specification" (AOTS) to exercise ``ttLib``'s + table readers/writers () + +3.7.2 (released 2017-02-17) +--------------------------- + +- [subset] Keep advance widths when stripping ".notdef" glyph outline in + CID-keyed CFF fonts (#845) +- [feaLib] Zero values now produce the same results as makeotf (#633, #848) +- [feaLib] More compact encoding for “Contextual positioning with in-line + single positioning rules” (#514) + +3.7.1 (released 2017-02-15) +--------------------------- + +- [subset] Fixed issue with ``--no-hinting`` option whereby advance widths in + Type 2 charstrings were also being stripped (#709, #343) +- [feaLib] include statements now resolve relative paths like makeotf (#838) +- [feaLib] table ``name`` now handles Unicode codepoints 
beyond the Basic + Multilingual Plane, also supports old-style MacOS platform encodings (#842) +- [feaLib] correctly escape string literals when emitting feature syntax (#780) + +3.7.0 (released 2017-02-11) +--------------------------- + +- [ttx, mtiLib] Preserve ordering of glyph alternates in GSUB type 3 (#833). +- [feaLib] Glyph names can have dashes, as per new AFDKO syntax v1.20 (#559). +- [feaLib] feaLib.Parser now needs the font's glyph map for parsing. +- [varLib] Fix regression where GPOS values were stored as 0. +- [varLib] Allow merging of class-based kerning when ClassDefs are different + +3.6.3 (released 2017-02-06) +--------------------------- + +- [varLib] Fix building variation of PairPosFormat2 (b5c34ce). +- Populate defaults even for otTables that have postRead (e45297b). +- Fix compiling of MultipleSubstFormat1 with zero 'out' glyphs (b887860). + +3.6.2 (released 2017-01-30) +--------------------------- + +- [varLib.merger] Fixed "TypeError: reduce() of empty sequence with no + initial value" (3717dc6). + +3.6.1 (released 2017-01-28) +--------------------------- + +- [py23] Fixed unhandled exception occurring at interpreter shutdown in + the "last resort" logging handler (972b3e6). +- [agl] Ensure all glyph names are of native 'str' type; avoid mixing + 'str' and 'unicode' in TTFont.glyphOrder (d8c4058). +- Fixed inconsistent title levels in README.rst that caused PyPI to + incorrectly render the reStructuredText page. + +3.6.0 (released 2017-01-26) +--------------------------- + +- [varLib] Refactored and improved the variation-font-building process. +- Assembly code in the fpgm, prep, and glyf tables is now indented in + XML output for improved readability. The ``instruction`` element is + written as a simple tag if empty (#819). +- [ttx] Fixed 'I/O operation on closed file' error when dumping + multiple TTXs to standard output with the '-o -' option. 
+- The unit test modules (``*_test.py``) have been moved outside of the + fontTools package to the Tests folder, thus they are no longer + installed (#811). + +3.5.0 (released 2017-01-14) +--------------------------- + +- Font tables read from XML can now be written back to XML with no + loss. +- GSUB/GPOS LookupType is written out in XML as an element, not + comment. (#792) +- When parsing cmap table, do not store items mapped to glyph id 0. + (#790) +- [otlLib] Make ClassDef sorting deterministic. Fixes #766 (7d1ddb2) +- [mtiLib] Added unit tests (#787) +- [cvar] Implemented cvar table +- [gvar] Renamed GlyphVariation to TupleVariation to match OpenType + terminology. +- [otTables] Handle gracefully empty VarData.Item array when compiling + XML. (#797) +- [varLib] Re-enabled generation of ``HVAR`` table for fonts with + TrueType outlines; removed ``--build-HVAR`` command-line option. +- [feaLib] The parser can now be extended to support non-standard + statements in FEA code by using a customized Abstract Syntax Tree. + See, for example, ``feaLib.builder_test.test_extensions`` and + baseClass.feax (#794, fixes #773). +- [feaLib] Added ``feaLib`` command to the 'fonttools' command-line + tool; applies a feature file to a font. ``fonttools feaLib -h`` for + help. +- [pens] The ``T2CharStringPen`` now takes an optional + ``roundTolerance`` argument to control the rounding of coordinates + (#804, fixes #769). +- [ci] Measure test coverage on all supported python versions and OSes, + combine coverage data and upload to + https://codecov.io/gh/fonttools/fonttools (#786) +- [ci] Configured Travis and Appveyor for running tests on Python 3.6 + (#785, 55c03bc) +- The manual pages installation directory can be customized through + ``FONTTOOLS_MANPATH`` environment variable (#799, fixes #84). 
+- [Snippets] Added otf2ttf.py, for converting fonts from CFF to + TrueType using the googlei18n/cu2qu module (#802) + +3.4.0 (released 2016-12-21) +--------------------------- + +- [feaLib] Added support for generating FEA text from abstract syntax + tree (AST) objects (#776). Thanks @mhosken +- Added ``agl.toUnicode`` function to convert AGL-compliant glyph names + to Unicode strings (#774) +- Implemented MVAR table (b4d5381) + +3.3.1 (released 2016-12-15) +--------------------------- + +- [setup] We no longer use versioneer.py to compute fonttools version + from git metadata, as this has caused issues for some users (#767). + Now we bump the version strings manually with a custom ``release`` + command of setup.py script. + +3.3.0 (released 2016-12-06) +--------------------------- + +- [ttLib] Implemented STAT table from OpenType 1.8 (#758) +- [cffLib] Fixed decompilation of CFF fonts containing non-standard + key/value pairs in FontDict (issue #740; PR #744) +- [py23] minor: in ``round3`` function, allow the second argument to be + ``None`` (#757) +- The standalone ``sstruct`` and ``xmlWriter`` modules, deprecated + since version 3.2.0, have been removed. They can be imported from the + ``fontTools.misc`` package. + +3.2.3 (released 2016-12-02) +--------------------------- + +- [py23] optimized performance of round3 function; added backport for + py35 math.isclose() (9d8dacb) +- [subset] fixed issue with 'narrow' (UCS-2) Python 2 builds and + ``--text``/``--text-file`` options containing non-BMP characters + (16d0e5e) +- [varLib] fixed issue when normalizing location values (8fa2ee1, #749) +- [inspect] Made it compatible with both python2 and python3 (167ee60, + #748). Thanks @pnemade + +3.2.2 (released 2016-11-24) +--------------------------- + +- [varLib] Do not emit null axes in fvar (1bebcec). 
Thanks @robmck-ms +- [varLib] Handle fonts without GPOS (7915a45) +- [merge] Ignore LangSys if None (a11bc56) +- [subset] Fix subsetting MathVariants (78d3cbe) +- [OS/2] Fix "Private Use (plane 15)" range (08a0d55). Thanks @mashabow + +3.2.1 (released 2016-11-03) +--------------------------- + +- [OS/2] fix checking ``fsSelection`` bits matching ``head.macStyle`` + bits +- [varLib] added ``--build-HVAR`` option to generate ``HVAR`` table for + fonts with TrueType outlines. For ``CFF2``, it is enabled by default. + +3.2.0 (released 2016-11-02) +--------------------------- + +- [varLib] Improve support for OpenType 1.8 Variable Fonts: +- Implement GDEF's VariationStore +- Implement HVAR/VVAR tables +- Partial support for loading MutatorMath .designspace files with + varLib.designspace module +- Add varLib.models with Variation fonts interpolation models +- Implement GSUB/GPOS FeatureVariations +- Initial support for interpolating and merging OpenType Layout tables + (see ``varLib.interpolate_layout`` and ``varLib.merger`` modules) +- [API change] Change version to be an integer instead of a float in + XML output for GSUB, GPOS, GDEF, MATH, BASE, JSTF, HVAR, VVAR, feat, + hhea and vhea tables. Scripts that set the Version for those to 1.0 + or other float values also need fixing. A warning is emitted when + code or XML needs fix. +- several bug fixes to the cffLib module, contributed by Adobe's + @readroberts +- The XML output for CFF table now has a 'major' and 'minor' elements + for specifying whether it's version 1.0 or 2.0 (support for CFF2 is + coming soon) +- [setup.py] remove undocumented/deprecated ``extra_path`` Distutils + argument. This means that we no longer create a "FontTools" subfolder + in site-packages containing the actual fontTools package, as well as + the standalone xmlWriter and sstruct modules. The latter modules are + also deprecated, and scheduled for removal in upcoming releases. 
+ Please change your import statements to point to from fontTools.misc + import xmlWriter and from fontTools.misc import sstruct. +- [scripts] Add a 'fonttools' command-line tool that simply runs + ``fontTools.*`` sub-modules: e.g. ``fonttools ttx``, + ``fonttools subset``, etc. +- [hmtx/vmts] Read advance width/heights as unsigned short (uint16); + automatically round float values to integers. +- [ttLib/xmlWriter] add 'newlinestr=None' keyword argument to + ``TTFont.saveXML`` for overriding os-specific line endings (passed on + to ``XMLWriter`` instances). +- [versioning] Use versioneer instead of ``setuptools_scm`` to + dynamically load version info from a git checkout at import time. +- [feaLib] Support backslash-prefixed glyph names. + +3.1.2 (released 2016-09-27) +--------------------------- + +- restore Makefile as an alternative way to build/check/install +- README.md: update instructions for installing package from source, + and for running test suite +- NEWS: Change log was out of sync with tagged release + +3.1.1 (released 2016-09-27) +--------------------------- + +- Fix ``ttLibVersion`` attribute in TTX files still showing '3.0' + instead of '3.1'. +- Use ``setuptools_scm`` to manage package versions. + +3.1.0 (released 2016-09-26) +--------------------------- + +- [feaLib] New library to parse and compile Adobe FDK OpenType Feature + files. +- [mtiLib] New library to parse and compile Monotype 'FontDame' + OpenType Layout Tables files. +- [voltLib] New library to parse Microsoft VOLT project files. +- [otlLib] New library to work with OpenType Layout tables. +- [varLib] New library to work with OpenType Font Variations. +- [pens] Add ttGlyphPen to draw to TrueType glyphs, and t2CharStringPen + to draw to Type 2 Charstrings (CFF); add areaPen and perimeterPen. +- [ttLib.tables] Implement 'meta' and 'trak' tables. +- [ttx] Add --flavor option for compiling to 'woff' or 'woff2'; add + ``--with-zopfli`` option to use Zopfli to compress WOFF 1.0 fonts. 
+- [subset] Support subsetting 'COLR'/'CPAL' and 'CBDT'/'CBLC' color + fonts tables, and 'gvar' table for variation fonts. +- [Snippets] Add ``symfont.py``, for symbolic font statistics analysis; + interpolatable.py, a preliminary script for detecting interpolation + errors; ``{merge,dump}_woff_metadata.py``. +- [classifyTools] Helpers to classify things into classes. +- [CI] Run tests on Windows, Linux and macOS using Appveyor and Travis + CI; check unit test coverage with Coverage.py/Coveralls; automatic + deployment to PyPI on tags. +- [loggingTools] Use Python built-in logging module to print messages. +- [py23] Make round() behave like Python 3 built-in round(); define + round2() and round3(). + +3.0 (released 2015-09-01) +------------------------- + +- Add Snippet scripts for cmap subtable format conversion, printing + GSUB/GPOS features, building a GX font from two masters +- TTX WOFF2 support and a ``-f`` option to overwrite output file(s) +- Support GX tables: ``avar``, ``gvar``, ``fvar``, ``meta`` +- Support ``feat`` and gzip-compressed SVG tables +- Upgrade Mac East Asian encodings to native implementation if + available +- Add Roman Croatian and Romanian encodings, codecs for mac-extended + East Asian encodings +- Implement optimal GLYF glyph outline packing; disabled by default + +2.5 (released 2014-09-24) +------------------------- + +- Add a Qt pen +- Add VDMX table converter +- Load all OpenType sub-structures lazily +- Add support for cmap format 13. 
+- Add pyftmerge tool +- Update to Unicode 6.3.0d3 +- Add pyftinspect tool +- Add support for Google CBLC/CBDT color bitmaps, standard EBLC/EBDT + embedded bitmaps, and ``SVG`` table (thanks to Read Roberts at Adobe) +- Add support for loading, saving and ttx'ing WOFF file format +- Add support for Microsoft COLR/CPAL layered color glyphs +- Support PyPy +- Support Jython, by replacing numpy with array/lists modules and + removed it, pure-Python StringIO, not cStringIO +- Add pyftsubset and Subsetter object, supporting CFF and TTF +- Add to ttx args for -q for quiet mode, -z to choose a bitmap dump + format + +2.4 (released 2013-06-22) +------------------------- + +- Option to write to arbitrary files +- Better dump format for DSIG +- Better detection of OTF XML +- Fix issue with Apple's kern table format +- Fix mangling of TT glyph programs +- Fix issues related to mona.ttf +- Fix Windows Installer instructions +- Fix some modern MacOS issues +- Fix minor issues and typos + +2.3 (released 2009-11-08) +------------------------- + +- TrueType Collection (TTC) support +- Python 2.6 support +- Update Unicode data to 5.2.0 +- Couple of bug fixes + +2.2 (released 2008-05-18) +------------------------- + +- ClearType support +- cmap format 1 support +- PFA font support +- Switched from Numeric to numpy +- Update Unicode data to 5.1.0 +- Update AGLFN data to 1.6 +- Many bug fixes + +2.1 (released 2008-01-28) +------------------------- + +- Many years worth of fixes and features + +2.0b2 (released 2002-??-??) +--------------------------- + +- Be "forgiving" when interpreting the maxp table version field: + interpret any value as 1.0 if it's not 0.5. Fixes dumping of these + GPL fonts: http://www.freebsd.org/cgi/pds.cgi?ports/chinese/wangttf +- Fixed ttx -l: it turned out this part of the code didn't work with + Python 2.2.1 and earlier. 
My bad to do most of my testing with a + different version than I shipped TTX with :-( +- Fixed bug in ClassDef format 1 subtable (Andreas Seidel bumped into + this one). + +2.0b1 (released 2002-09-10) +--------------------------- + +- Fixed embarrassing bug: the master checksum in the head table is now + calculated correctly even on little-endian platforms (such as Intel). +- Made the cmap format 4 compiler smarter: the binary data it creates + is now more or less as compact as possible. TTX now makes more + compact data than in any shipping font I've tested it with. +- Dump glyph names as a separate "GlyphOrder" pseudo table as opposed + to as part of the glyf table (obviously needed for CFF-OTF's). +- Added proper support for the CFF table. +- Don't barf on empty tables (questionable, but "there are font out + there...") +- When writing TT glyf data, align glyphs on 4-byte boundaries. This + seems to be the current recommendation by MS. Also: don't barf on + fonts which are already 4-byte aligned. +- Windows installer contributed by Adam Twardoch! Yay! +- Changed the command line interface again, now by creating one new + tool replacing the old ones: ttx. It dumps and compiles, depending on + input file types. The options have changed somewhat. +- The -d option is back (output dir) +- ttcompile's -i option is now called -m (as in "merge"), to avoid + clash with dump's -i. +- The -s option ("split tables") no longer creates a directory, but + instead outputs a small .ttx file containing references to the + individual table files. This is not a true link, it's a simple file + name, and the referenced file should be in the same directory so + ttcompile can find them. +- compile no longer accepts a directory as input argument. Instead it + can parse the new "mini-ttx" format as output by "ttx -s". +- all arguments are input files +- Renamed the command line programs and moved them to the Tools + subdirectory. They are now installed by the setup.py install script. 
+- Added OpenType support. BASE, GDEF, GPOS, GSUB and JSTF are (almost) + fully supported. The XML output is not yet final, as I'm still + considering to output certain subtables in a more human-friendly + manner. +- Fixed 'kern' table to correctly accept subtables it doesn't know + about, as well as interpreting Apple's definition of the 'kern' table + headers correctly. +- Fixed bug where glyphnames were not calculated from 'cmap' if it was + (one of the) first tables to be decompiled. More specifically: if + cmap was the first to ask for a glyphID -> glyphName mapping. +- Switched XML parsers: use expat instead of xmlproc. Should be faster. +- Removed my UnicodeString object: I now require Python 2.0 or up, + which has unicode support built in. +- Removed assert in glyf table: redundant data at the end of the table + is now ignored instead of raising an error. Should become a warning. +- Fixed bug in hmtx/vmtx code that only occurred if all advances were + equal. +- Fixed subtle bug in TT instruction disassembler. +- Couple of fixes to the 'post' table. +- Updated OS/2 table to latest spec. + +1.0b1 (released 2001-08-10) +--------------------------- + +- Reorganized the command line interface for ttDump.py and + ttCompile.py, they now behave more like "normal" command line tools, + in that they accept multiple input files for batch processing. +- ttDump.py and ttCompile.py don't silently overwrite files anymore, but + ask before doing so. Can be overridden by -f. +- Added -d option to both ttDump.py and ttCompile.py. +- Installation is now done with distutils. (Needs work for environments + without compilers.) +- Updated installation instructions. +- Added some workarounds so as to handle certain buggy fonts more + gracefully. +- Updated Unicode table to Unicode 3.0 (Thanks Antoine!) +- Included a Python script by Adam Twardoch that adds some useful stuff + to the Windows registry. +- Moved the project to SourceForge. 
+ +1.0a6 (released 2000-03-15) +--------------------------- + +- Big reorganization: made ttLib a subpackage of the new fontTools + package, changed several module names. Called the entire suite + "FontTools" +- Added several submodules to fontTools, some new, some older. +- Added experimental CFF/GPOS/GSUB support to ttLib, read-only (but XML + dumping of GPOS/GSUB is for now disabled) +- Fixed hdmx endian bug +- Added -b option to ttCompile.py, it disables recalculation of + bounding boxes, as requested by Werner Lemberg. +- Renamed tt2xml.pt to ttDump.py and xml2tt.py to ttCompile.py +- Use ".ttx" as file extension instead of ".xml". +- TTX is now the name of the XML-based *format* for TT fonts, and not + just an application. + +1.0a5 +----- + +Never released + +- More tables supported: hdmx, vhea, vmtx + +1.0a3 & 1.0a4 +------------- + +Never released + +- fixed most portability issues +- retracted the "Euro_or_currency" change from 1.0a2: it was + nonsense! + +1.0a2 (released 1999-05-02) +--------------------------- + +- binary release for MacOS +- generates full FOND resources: including width table, PS font name + info and kern table if applicable. +- added cmap format 4 support. Extra: dumps Unicode char names as XML + comments! +- added cmap format 6 support +- now accepts true type files starting with "true" (instead of just + 0x00010000 and "OTTO") +- 'glyf' table support is now complete: I added support for composite + scale, xy-scale and two-by-two for the 'glyf' table. For now, + component offset scale behaviour defaults to Apple-style. This only + affects the (re)calculation of the glyph bounding box. +- changed "Euro" to "Euro_or_currency" in the Standard Apple Glyph + order list, since we cannot tell from the 'post' table which is + meant. I should probably doublecheck with a Unicode encoding if + available. (This does not affect the output!) 
+ +Fixed bugs: - 'hhea' table is now recalculated correctly - fixed wrong +assumption about sfnt resource names + +1.0a1 (released 1999-04-27) +--------------------------- + +- initial binary release for MacOS diff --git a/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/RECORD b/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..6930a0c62b0190000a209fbb04339642a4038db8 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/RECORD @@ -0,0 +1,643 @@ +../../../bin/fonttools,sha256=P48YST5UvCM333Sbg7SXc6Vxr_X-mXJgpmOCvDs4rkM,229 +../../../bin/pyftmerge,sha256=m3eET_RQ16kplqhro_EECz9-0ypaeZ9Wc09e_rQfJGg,226 +../../../bin/pyftsubset,sha256=n1h66COY4XMNZyatgzuitOX_yE9qdK2ktDAakWXxm5U,227 +../../../bin/ttx,sha256=wIiiSulL8BPTMwfKODhjOMlvJGJ98-40jUBvCZoxxdA,224 +../../../share/man/man1/ttx.1,sha256=cLbm_pOOj1C76T2QXvDxzwDj9gk-GTd5RztvTMsouFw,5377 +fontTools/__init__.py,sha256=QpXvyS41alnu5EXBOrRREAHue4HET1On9zYFOvSTOs8,183 +fontTools/__main__.py,sha256=VjkGh1UD-i1zTDA1dXo1uecSs6PxHdGQ5vlCk_mCCYs,925 +fontTools/__pycache__/__init__.cpython-310.pyc,, +fontTools/__pycache__/__main__.cpython-310.pyc,, +fontTools/__pycache__/afmLib.cpython-310.pyc,, +fontTools/__pycache__/agl.cpython-310.pyc,, +fontTools/__pycache__/fontBuilder.cpython-310.pyc,, +fontTools/__pycache__/help.cpython-310.pyc,, +fontTools/__pycache__/tfmLib.cpython-310.pyc,, +fontTools/__pycache__/ttx.cpython-310.pyc,, +fontTools/__pycache__/unicode.cpython-310.pyc,, +fontTools/afmLib.py,sha256=1MagIItOzRV4vV5kKPxeDZbPJsfxLB3wdHLFkQvl0uk,13164 +fontTools/agl.py,sha256=05bm8Uq45uVWW8nPbP6xbNgmFyxQr8sWhYAiP0VSjnI,112975 +fontTools/cffLib/CFF2ToCFF.py,sha256=K0bgyxhiVq7E7W3T68SJvMWBjWqDBSqx2whmNaPVPgc,5572 +fontTools/cffLib/CFFToCFF2.py,sha256=t3J641CQ2lI-LvynUaVpCiaiu-nVoLgWJwNe1kFGMjQ,9976 +fontTools/cffLib/__init__.py,sha256=itWKwFH6ij4lEbwgqP7NQRdb-Q_84UQPZD7M1SPOTY0,106866 
+fontTools/cffLib/__pycache__/CFF2ToCFF.cpython-310.pyc,, +fontTools/cffLib/__pycache__/CFFToCFF2.cpython-310.pyc,, +fontTools/cffLib/__pycache__/__init__.cpython-310.pyc,, +fontTools/cffLib/__pycache__/specializer.cpython-310.pyc,, +fontTools/cffLib/__pycache__/transforms.cpython-310.pyc,, +fontTools/cffLib/__pycache__/width.cpython-310.pyc,, +fontTools/cffLib/specializer.py,sha256=q3AeBbxWnfP_teaqTh1YfM-9e34Pxs_yGicxc93QMrc,30533 +fontTools/cffLib/transforms.py,sha256=gbcUnFz-MTBxkSYm-eZRox6J2UMlqBTgMgpPmx6SHwo,17015 +fontTools/cffLib/width.py,sha256=IqGL0CLyCZqi_hvsHySG08qpYxS3kaqW-tsAT-bjHV4,6074 +fontTools/colorLib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fontTools/colorLib/__pycache__/__init__.cpython-310.pyc,, +fontTools/colorLib/__pycache__/builder.cpython-310.pyc,, +fontTools/colorLib/__pycache__/errors.cpython-310.pyc,, +fontTools/colorLib/__pycache__/geometry.cpython-310.pyc,, +fontTools/colorLib/__pycache__/table_builder.cpython-310.pyc,, +fontTools/colorLib/__pycache__/unbuilder.cpython-310.pyc,, +fontTools/colorLib/builder.py,sha256=kmO7OuudQQb3fEOS7aLzgTDVjqS9i2xIQmk9p1uBe8A,23008 +fontTools/colorLib/errors.py,sha256=CsaviiRxxrpgVX4blm7KCyK8553ljwL44xkJOeC5U7U,41 +fontTools/colorLib/geometry.py,sha256=3ScySrR2YDJa7d5K5_xM5Yt1-3NCV-ry8ikYA5VwVbI,5518 +fontTools/colorLib/table_builder.py,sha256=ZeltWY6n-YPiJv_hQ1iBXoEFAG70EKxZyScgsMKUFGU,7469 +fontTools/colorLib/unbuilder.py,sha256=iW-E5I39WsV82K3NgCO4Cjzwm1WqzGrtypHt8epwbHM,2142 +fontTools/config/__init__.py,sha256=Ti5jpozjMqp5qhnrmwNcWI6b9uvHzhZlbWXHTqVZlGI,2643 +fontTools/config/__pycache__/__init__.cpython-310.pyc,, +fontTools/cu2qu/__init__.py,sha256=Cuc7Uglb0nSgaraTxXY5J8bReznH5wApW0uakN7MycY,618 +fontTools/cu2qu/__main__.py,sha256=kTUI-jczsHeelULLlory74QEeFjZWp9zigCc7PrdVQY,92 +fontTools/cu2qu/__pycache__/__init__.cpython-310.pyc,, +fontTools/cu2qu/__pycache__/__main__.cpython-310.pyc,, +fontTools/cu2qu/__pycache__/benchmark.cpython-310.pyc,, 
+fontTools/cu2qu/__pycache__/cli.cpython-310.pyc,, +fontTools/cu2qu/__pycache__/cu2qu.cpython-310.pyc,, +fontTools/cu2qu/__pycache__/errors.cpython-310.pyc,, +fontTools/cu2qu/__pycache__/ufo.cpython-310.pyc,, +fontTools/cu2qu/benchmark.py,sha256=wasPJmf8q9k9UHjpHChC3WQAGbBAyHN9PvJzXvWC0Fw,1296 +fontTools/cu2qu/cli.py,sha256=MbAQnOpZwrUFe_tjAP3Tgf6uLdOgHlONUcPNeTXwH0Y,6076 +fontTools/cu2qu/cu2qu.c,sha256=xCmVdXDI_c_mtbU0KNZPs9cAP4h4nJo14Y_Mo7WWbao,593463 +fontTools/cu2qu/cu2qu.cpython-310-x86_64-linux-gnu.so,sha256=GWul8HeQZExM3O8sjWZ3lHBoFuwbVZP4Pk57eQB7-XE,976056 +fontTools/cu2qu/cu2qu.py,sha256=UIFGlFq9X6Pj_NuaXg7KWIzLyR1jnx7nMCX-hFVG0SQ,16466 +fontTools/cu2qu/errors.py,sha256=PyJNMy8lHDtKpfFkc0nkM8F4jNLZAC4lPQCN1Km4bpg,2441 +fontTools/cu2qu/ufo.py,sha256=qZR70uWdCia19Ff8GLn5NeItscvvn69DegjDZVF4eNI,11794 +fontTools/designspaceLib/__init__.py,sha256=gQKalFNhackVAU9t3Z0nz6zChKkm2-79qpArmud37tA,129251 +fontTools/designspaceLib/__main__.py,sha256=xhtYXo1T1tsykhQDD0tcconSNYgWL5hoTBORpVDUYrc,103 +fontTools/designspaceLib/__pycache__/__init__.cpython-310.pyc,, +fontTools/designspaceLib/__pycache__/__main__.cpython-310.pyc,, +fontTools/designspaceLib/__pycache__/split.cpython-310.pyc,, +fontTools/designspaceLib/__pycache__/statNames.cpython-310.pyc,, +fontTools/designspaceLib/__pycache__/types.cpython-310.pyc,, +fontTools/designspaceLib/split.py,sha256=FB1NuvhUO453UXveQZi9oyrW_caoCPM3RADp1rYWkDs,19239 +fontTools/designspaceLib/statNames.py,sha256=lDqFxZAKSbpMuLsgbK6XtyHA5lqLyAK0t561wsSWmaM,9069 +fontTools/designspaceLib/types.py,sha256=ofK65qXNADqcpl7zI72Pa5s07-cm7G41iEmLVV44-Es,5320 +fontTools/encodings/MacRoman.py,sha256=4vEooUDm2gLCG8KIIDhRxm5-A64w7XrhP9cjDRr2Eo0,3576 +fontTools/encodings/StandardEncoding.py,sha256=Eo3AGE8FE_p-IVYYuV097KouSsF3UrXoRRN0XyvYbrs,3581 +fontTools/encodings/__init__.py,sha256=DJBWmoX_Haau7qlgmvWyfbhSzrX2qL636Rns7CG01pk,75 +fontTools/encodings/__pycache__/MacRoman.cpython-310.pyc,, 
+fontTools/encodings/__pycache__/StandardEncoding.cpython-310.pyc,, +fontTools/encodings/__pycache__/__init__.cpython-310.pyc,, +fontTools/encodings/__pycache__/codecs.cpython-310.pyc,, +fontTools/encodings/codecs.py,sha256=u50ruwz9fcRsrUrRGpR17Cr55Ovn1fvCHCKrElVumDE,4721 +fontTools/feaLib/__init__.py,sha256=jlIru2ghxvb1HhC5Je2BCXjFJmFQlYKpruorPoz3BvQ,213 +fontTools/feaLib/__main__.py,sha256=Df2PA6LXwna98lSXiL7R4as_ZEdWCIk3egSM5w7GpvM,2240 +fontTools/feaLib/__pycache__/__init__.cpython-310.pyc,, +fontTools/feaLib/__pycache__/__main__.cpython-310.pyc,, +fontTools/feaLib/__pycache__/ast.cpython-310.pyc,, +fontTools/feaLib/__pycache__/builder.cpython-310.pyc,, +fontTools/feaLib/__pycache__/error.cpython-310.pyc,, +fontTools/feaLib/__pycache__/lexer.cpython-310.pyc,, +fontTools/feaLib/__pycache__/location.cpython-310.pyc,, +fontTools/feaLib/__pycache__/lookupDebugInfo.cpython-310.pyc,, +fontTools/feaLib/__pycache__/parser.cpython-310.pyc,, +fontTools/feaLib/__pycache__/variableScalar.cpython-310.pyc,, +fontTools/feaLib/ast.py,sha256=_27skibzPidJtI5lUFeVjEv5NVaNPbuz4u8oZfMuxMk,73801 +fontTools/feaLib/builder.py,sha256=H-WC0TzlkQB1GZuLbijD66f8_aJexoXxQJ6L-cXQ0Bo,69994 +fontTools/feaLib/error.py,sha256=Tq2dZUlCOyLfjTr3qibsT2g9t-S_JEf6bKgyNX55oCE,643 +fontTools/feaLib/lexer.c,sha256=vQ4myMvJqvp8rdY6YeEQJHM2Crw_eFajkHWefik884Q,750756 +fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so,sha256=glRZueojI8WPTmhu_zopdIzYICdHsoy4urTEZOaiW6o,1346664 +fontTools/feaLib/lexer.py,sha256=emyMPmRoqNZkzxnJyI6JRCCtXrbCOFofwa9O6ABGLiw,11121 +fontTools/feaLib/location.py,sha256=JXzHqGV56EHdcq823AwA5oaK05hf_1ySWpScbo3zGC0,234 +fontTools/feaLib/lookupDebugInfo.py,sha256=gVRr5-APWfT_a5-25hRuawSVX8fEvXVsOSLWkH91T2w,304 +fontTools/feaLib/parser.py,sha256=wbfG_-rqrn2RWMRQMlR3-uaiM9k4_mzCVF-wPLr00rQ,98466 +fontTools/feaLib/variableScalar.py,sha256=Xu8tpDlQbfIfjnKnYDEf43EqVdyIJUy8_1ROVPg9_mg,4069 +fontTools/fontBuilder.py,sha256=phkTJlv-VKaZVG1MLyFCoxLwn5J8fpsfPVGz6Cjm7BM,33299 
+fontTools/help.py,sha256=bAjatvIhV7TJyXI7WhsxdYO4YVlhScZXu_kRtHANEPo,1125 +fontTools/merge/__init__.py,sha256=-l65-mbTwSh0gjarnojIfsAX-ZkMtdz3vGTjtYHQ2ws,8250 +fontTools/merge/__main__.py,sha256=hDx3gfbUBO83AJKumSEhiV-xqNTJNNgK2uFjazOGTmw,94 +fontTools/merge/__pycache__/__init__.cpython-310.pyc,, +fontTools/merge/__pycache__/__main__.cpython-310.pyc,, +fontTools/merge/__pycache__/base.cpython-310.pyc,, +fontTools/merge/__pycache__/cmap.cpython-310.pyc,, +fontTools/merge/__pycache__/layout.cpython-310.pyc,, +fontTools/merge/__pycache__/options.cpython-310.pyc,, +fontTools/merge/__pycache__/tables.cpython-310.pyc,, +fontTools/merge/__pycache__/unicode.cpython-310.pyc,, +fontTools/merge/__pycache__/util.cpython-310.pyc,, +fontTools/merge/base.py,sha256=l0G1Px98E9ZdVuFLMUBKWdtr7Jb8JX8vxcjeaDUUnzY,2389 +fontTools/merge/cmap.py,sha256=_oCBnZfm5M7ebYRJnOYw5wUEICFmdR6kMUe1w6jsVuM,5545 +fontTools/merge/layout.py,sha256=fkMPGPLxEdxohS3scVM4W7LmNthSz-UPyocsffe2KqE,16075 +fontTools/merge/options.py,sha256=xko_1-WErcNQkirECzIOOYxSJR_bRtdQYQYOtmgccYI,2501 +fontTools/merge/tables.py,sha256=uBD1-XqOCDzFxp0D7ZDvrMRdd8R7eAm58WtYKhz-m5w,10640 +fontTools/merge/unicode.py,sha256=kb1Jrfuoq1KUcVhhSKnflAED_wMZxXDjVwB-CI9k05Y,4273 +fontTools/merge/util.py,sha256=BH3bZWNFy-Tsj1cth7aSpGVJ18YXKXqDakPn6Wzku6U,3378 +fontTools/misc/__init__.py,sha256=DJBWmoX_Haau7qlgmvWyfbhSzrX2qL636Rns7CG01pk,75 +fontTools/misc/__pycache__/__init__.cpython-310.pyc,, +fontTools/misc/__pycache__/arrayTools.cpython-310.pyc,, +fontTools/misc/__pycache__/bezierTools.cpython-310.pyc,, +fontTools/misc/__pycache__/classifyTools.cpython-310.pyc,, +fontTools/misc/__pycache__/cliTools.cpython-310.pyc,, +fontTools/misc/__pycache__/configTools.cpython-310.pyc,, +fontTools/misc/__pycache__/cython.cpython-310.pyc,, +fontTools/misc/__pycache__/dictTools.cpython-310.pyc,, +fontTools/misc/__pycache__/eexec.cpython-310.pyc,, +fontTools/misc/__pycache__/encodingTools.cpython-310.pyc,, 
+fontTools/misc/__pycache__/etree.cpython-310.pyc,, +fontTools/misc/__pycache__/filenames.cpython-310.pyc,, +fontTools/misc/__pycache__/fixedTools.cpython-310.pyc,, +fontTools/misc/__pycache__/intTools.cpython-310.pyc,, +fontTools/misc/__pycache__/iterTools.cpython-310.pyc,, +fontTools/misc/__pycache__/lazyTools.cpython-310.pyc,, +fontTools/misc/__pycache__/loggingTools.cpython-310.pyc,, +fontTools/misc/__pycache__/macCreatorType.cpython-310.pyc,, +fontTools/misc/__pycache__/macRes.cpython-310.pyc,, +fontTools/misc/__pycache__/psCharStrings.cpython-310.pyc,, +fontTools/misc/__pycache__/psLib.cpython-310.pyc,, +fontTools/misc/__pycache__/psOperators.cpython-310.pyc,, +fontTools/misc/__pycache__/py23.cpython-310.pyc,, +fontTools/misc/__pycache__/roundTools.cpython-310.pyc,, +fontTools/misc/__pycache__/sstruct.cpython-310.pyc,, +fontTools/misc/__pycache__/symfont.cpython-310.pyc,, +fontTools/misc/__pycache__/testTools.cpython-310.pyc,, +fontTools/misc/__pycache__/textTools.cpython-310.pyc,, +fontTools/misc/__pycache__/timeTools.cpython-310.pyc,, +fontTools/misc/__pycache__/transform.cpython-310.pyc,, +fontTools/misc/__pycache__/treeTools.cpython-310.pyc,, +fontTools/misc/__pycache__/vector.cpython-310.pyc,, +fontTools/misc/__pycache__/visitor.cpython-310.pyc,, +fontTools/misc/__pycache__/xmlReader.cpython-310.pyc,, +fontTools/misc/__pycache__/xmlWriter.cpython-310.pyc,, +fontTools/misc/arrayTools.py,sha256=jZk__GE-K9VViZE_H-LPPj0smWbKng-yfPE8BfGp8HI,11483 +fontTools/misc/bezierTools.c,sha256=HuOs781YMbzNHWkGotqCqfhHwft4QkfyYYGG9mN5MwU,1806287 +fontTools/misc/bezierTools.cpython-310-x86_64-linux-gnu.so,sha256=hCl0R5smfg2ZFsamvEg8TAJY4XINOQDraOAO3SRq4-0,4340864 +fontTools/misc/bezierTools.py,sha256=JKPfNC8xY3tj_RrILA1N2dh4oy3hEXQ_BfKiA2_dqM0,44758 +fontTools/misc/classifyTools.py,sha256=zcg3EM4GOerBW9c063ljaLllgeeZ772EpFZjp9CdgLI,5613 +fontTools/misc/cliTools.py,sha256=qCznJMLCQu3ZHQD_4ctUnr3TkfAUdkGl-UuxZUrppy0,1862 
+fontTools/misc/configTools.py,sha256=YXBE_vL2dMWCnK4oY3vtU15B79q82DtKp7h7XRqJc1Q,11188 +fontTools/misc/cython.py,sha256=eyLcL2Bw-SSToYro8f44dkkYRlQfiFbhcza0afS-qHE,682 +fontTools/misc/dictTools.py,sha256=VxjarsGJuk_wa3z29FSCtKZNCFfXtMBiNEu0RPAlpDk,2417 +fontTools/misc/eexec.py,sha256=GNn2OCRvO1HbbIeDPxk9i0glO7cux_AQaoVMXhBR8y8,3331 +fontTools/misc/encodingTools.py,sha256=hCv5PFfnXQJVCZA8Wyn1vr3vzLBbUuEPtGk5CzWM9RY,2073 +fontTools/misc/etree.py,sha256=EPldipUNNMvbPimNX7qOUwKkbpJMY4uyElhe-wqKWkM,17079 +fontTools/misc/filenames.py,sha256=MMCO3xjk1pcDc-baobcKd8IdoFPt-bcGqu8t8HUGAkI,8223 +fontTools/misc/fixedTools.py,sha256=gsotTCOJLyMis13M4_jQJ8-QPob2Gl2TtNJhW6FER1I,7647 +fontTools/misc/intTools.py,sha256=l6pjk4UYlXcyLtfC0DdOC5RL6UJ8ihRR0zRiYow5xA8,586 +fontTools/misc/iterTools.py,sha256=17H6LPZszp32bTKoNorp6uZF1PKj47BAbe5QG8irUjo,390 +fontTools/misc/lazyTools.py,sha256=BC6MmF-OzJ3GrBD8TYDZ-VCSN4UOx0pN0r3oF4GSoiw,1020 +fontTools/misc/loggingTools.py,sha256=2uXks8fEnBjdgJEcxMLvD77-lbOPto3neJ86bMqV_qM,19898 +fontTools/misc/macCreatorType.py,sha256=Je9jtqUr7EPbpH3QxlVl3pizoQ-1AOPMBIctHIMTM3k,1593 +fontTools/misc/macRes.py,sha256=GT_pnfPw2NCvvOF86nHLAnOtZ6SMHqEuLntaplXzvHM,8579 +fontTools/misc/plistlib/__init__.py,sha256=1HfhHPt3As6u2eRSlFfl6XdnXv_ypQImeQdWIw6wK7Y,21113 +fontTools/misc/plistlib/__pycache__/__init__.cpython-310.pyc,, +fontTools/misc/plistlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fontTools/misc/psCharStrings.py,sha256=Tb5-k_5krP0eu7qD054iGxE4Zybk9oB4jdiKzcsV0rw,43036 +fontTools/misc/psLib.py,sha256=ioIPm5x3MHkBXF2vzNkC4iVZYobrkWcyvFhmYsjOrPY,12099 +fontTools/misc/psOperators.py,sha256=9SLl5PPBulLo0Xxg_dqlJMitNIBdiGKdkXhOWsNSYZE,15700 +fontTools/misc/py23.py,sha256=aPVCEUz_deggwLBCeTSsccX6QgJavZqvdVtuhpzrPvA,2238 +fontTools/misc/roundTools.py,sha256=1RSXZ0gyi1qW42tz6WSBMJD1FlPdtgqKfWixVN9bd78,3173 +fontTools/misc/sstruct.py,sha256=HuXwoRr9-mAbBxI3gJ3n34ML7NAGSHsAAazaaloWQB4,7158 
+fontTools/misc/symfont.py,sha256=dPh-kIzlSS33Ff61PeZM_qd3lMxe_gdCRlmG3wl-u1Q,7012 +fontTools/misc/testTools.py,sha256=P0lianKHKQ1re3IrLW5JGfoLgUXdtVJJceaNO5stA3o,6933 +fontTools/misc/textTools.py,sha256=pbhr6LVhm3J-0Z4saYnJfxBDzyoiw4BR9pAgwypiOw8,3377 +fontTools/misc/timeTools.py,sha256=e9h5pgzL04tBDXmCv_8eRGB4boFV8GKXlS6dq3ggEpw,2234 +fontTools/misc/transform.py,sha256=BfESxMaKIo_PtCZI-HyNLOADatCUAltf2c-REGVOJf8,14822 +fontTools/misc/treeTools.py,sha256=tLWkwyDHeZUPVOGNnJeD4Pn7x2bQeZetwJKaEAW2J2M,1269 +fontTools/misc/vector.py,sha256=6lqZcDjAgHJFQgjzD-ULQ_PrigAMfeZKaBZmAfcC0ig,4062 +fontTools/misc/visitor.py,sha256=S3I_OCavPhkwGQpwIKV9XjNCaWUcafo7HQCyxDI0nQg,5314 +fontTools/misc/xmlReader.py,sha256=igut4_d13RT4WarliqVvuuPybO1uSXVeoBOeW4j0_e4,6580 +fontTools/misc/xmlWriter.py,sha256=CA1c-Ov5vFTF9tT4bGk-f3yBvaX7lVmSdLPYygUqlAE,6046 +fontTools/mtiLib/__init__.py,sha256=vPgS5Ko7dE0GJX1aDmXSwLOaBENDUgdAAFvYVdQ4boo,46617 +fontTools/mtiLib/__main__.py,sha256=gd8X89jnZOe-752k7uaR1lWoiju-2zIT5Yx35Kl0Xek,94 +fontTools/mtiLib/__pycache__/__init__.cpython-310.pyc,, +fontTools/mtiLib/__pycache__/__main__.cpython-310.pyc,, +fontTools/otlLib/__init__.py,sha256=D2leUW-3gsUTOFcJYGC18edBYjIJ804ut4qitJYWsaQ,45 +fontTools/otlLib/__pycache__/__init__.cpython-310.pyc,, +fontTools/otlLib/__pycache__/builder.cpython-310.pyc,, +fontTools/otlLib/__pycache__/error.cpython-310.pyc,, +fontTools/otlLib/__pycache__/maxContextCalc.cpython-310.pyc,, +fontTools/otlLib/builder.py,sha256=n5WTJwuHsE8lgBdZJQyeUhW7qxbTSFrWTUODN2_qP2Y,119693 +fontTools/otlLib/error.py,sha256=cthuhBuOwZYpkTLi5gFPupUxkXkCHe-L_YgkE7N1wCI,335 +fontTools/otlLib/maxContextCalc.py,sha256=3es4Kt84TaZ49sA2ev1zrlwPJikJCAECx5KavwhyB-I,3175 +fontTools/otlLib/optimize/__init__.py,sha256=UUQRpNkHU2RczCRt-Gz7sEiYE9AQq9BHLXZEOyvsnX4,1530 +fontTools/otlLib/optimize/__main__.py,sha256=BvP472kA9KxBb9RMyyehPNevAfpmgW9MfdazkUiAO3M,104 +fontTools/otlLib/optimize/__pycache__/__init__.cpython-310.pyc,, 
+fontTools/otlLib/optimize/__pycache__/__main__.cpython-310.pyc,, +fontTools/otlLib/optimize/__pycache__/gpos.cpython-310.pyc,, +fontTools/otlLib/optimize/gpos.py,sha256=NTDLwjo90L4GiqdIdWkBEycQ7VcT7cOxxype73mFz8c,18474 +fontTools/pens/__init__.py,sha256=DJBWmoX_Haau7qlgmvWyfbhSzrX2qL636Rns7CG01pk,75 +fontTools/pens/__pycache__/__init__.cpython-310.pyc,, +fontTools/pens/__pycache__/areaPen.cpython-310.pyc,, +fontTools/pens/__pycache__/basePen.cpython-310.pyc,, +fontTools/pens/__pycache__/boundsPen.cpython-310.pyc,, +fontTools/pens/__pycache__/cairoPen.cpython-310.pyc,, +fontTools/pens/__pycache__/cocoaPen.cpython-310.pyc,, +fontTools/pens/__pycache__/cu2quPen.cpython-310.pyc,, +fontTools/pens/__pycache__/explicitClosingLinePen.cpython-310.pyc,, +fontTools/pens/__pycache__/filterPen.cpython-310.pyc,, +fontTools/pens/__pycache__/freetypePen.cpython-310.pyc,, +fontTools/pens/__pycache__/hashPointPen.cpython-310.pyc,, +fontTools/pens/__pycache__/momentsPen.cpython-310.pyc,, +fontTools/pens/__pycache__/perimeterPen.cpython-310.pyc,, +fontTools/pens/__pycache__/pointInsidePen.cpython-310.pyc,, +fontTools/pens/__pycache__/pointPen.cpython-310.pyc,, +fontTools/pens/__pycache__/qtPen.cpython-310.pyc,, +fontTools/pens/__pycache__/qu2cuPen.cpython-310.pyc,, +fontTools/pens/__pycache__/quartzPen.cpython-310.pyc,, +fontTools/pens/__pycache__/recordingPen.cpython-310.pyc,, +fontTools/pens/__pycache__/reportLabPen.cpython-310.pyc,, +fontTools/pens/__pycache__/reverseContourPen.cpython-310.pyc,, +fontTools/pens/__pycache__/roundingPen.cpython-310.pyc,, +fontTools/pens/__pycache__/statisticsPen.cpython-310.pyc,, +fontTools/pens/__pycache__/svgPathPen.cpython-310.pyc,, +fontTools/pens/__pycache__/t2CharStringPen.cpython-310.pyc,, +fontTools/pens/__pycache__/teePen.cpython-310.pyc,, +fontTools/pens/__pycache__/transformPen.cpython-310.pyc,, +fontTools/pens/__pycache__/ttGlyphPen.cpython-310.pyc,, +fontTools/pens/__pycache__/wxPen.cpython-310.pyc,, 
+fontTools/pens/areaPen.py,sha256=Y1WkmqzcC4z_bpGAR0IZUKrtHFtxKUQBmr5-64_zCOk,1472 +fontTools/pens/basePen.py,sha256=eIGSKrKm6w4LLHuG6XJoQZ3eObtoKV5P6aF4gT4sk7U,17073 +fontTools/pens/boundsPen.py,sha256=wE3owOQA8DfhH-zBGC3lJvnVwp-oyIt0KZrEqXbmS9I,3129 +fontTools/pens/cairoPen.py,sha256=wuuOJ1qQDSt_K3zscM2nukRyHZTZMwMzzCXCirfq_qQ,592 +fontTools/pens/cocoaPen.py,sha256=IJRQcAxRuVOTQ90bB_Bgjnmz7px_ST5uLF9CW-Y0KPY,612 +fontTools/pens/cu2quPen.py,sha256=gMUwFUsm_-WzBlDjTMQiNnEuI2heomGeOJBX81zYXPo,13007 +fontTools/pens/explicitClosingLinePen.py,sha256=kKKtdZiwaf8Cj4_ytrIDdGB2GMpPPDXm5Nwbw5WDgwU,3219 +fontTools/pens/filterPen.py,sha256=kKSvLmWCW4MkCF0ciJhjTj-LdUGOQL593PFkpm5PhP8,7790 +fontTools/pens/freetypePen.py,sha256=HD-gXJSbgImJdBc8sIBk0HWBdjv3WKFofs6PgCCsGOY,19908 +fontTools/pens/hashPointPen.py,sha256=gElrFyQoOQp3ZbpKHRWPwC61A9OgT2Js8crVUD8BQAY,3573 +fontTools/pens/momentsPen.c,sha256=HrqBRPCkiD0WXslFguGGEBgCq1h9Jx7mGkV9FX5vgkg,541056 +fontTools/pens/momentsPen.cpython-310-x86_64-linux-gnu.so,sha256=50DL3LHfXmKzXjGwqT3DuNliatfL8ZXY0uXHElxQjGE,1077352 +fontTools/pens/momentsPen.py,sha256=JsIL7KtRz0ZWG1_TPDzkwK0eNEr9excg3ggttRNGqIs,25685 +fontTools/pens/perimeterPen.py,sha256=lr6NzrIWxi4TXBJPbcJsKzqABWfQeil2Bgm9BgUD3N4,2153 +fontTools/pens/pointInsidePen.py,sha256=noEUvBQIeAheDMJwzvvfnEiKhmwbS1i0RQE9jik6Gl4,6355 +fontTools/pens/pointPen.py,sha256=IA0JVDaf8_aAvjRQv3asXItxxfzhv4gEEFvrlDlCx_k,22296 +fontTools/pens/qtPen.py,sha256=QRNLIry2rQl4E_7ct2tu10-qLHneQp0XV7FfaZ-tcL8,634 +fontTools/pens/qu2cuPen.py,sha256=pRST43-rUpzlOP83Z_Rr0IvIQBCx6RWI6nnNaitQcLk,3985 +fontTools/pens/quartzPen.py,sha256=EH482Kz_xsqYhVRovv6N_T1CXaSvOzUKPLxTaN956tU,1287 +fontTools/pens/recordingPen.py,sha256=VgFZ4NMhnZt1qSTzFEU0cma-gw3kBe47bfSxPYH73rs,12489 +fontTools/pens/reportLabPen.py,sha256=kpfMfOLXt2vOQ5smPsU82ft80FpCPWJzQLl7ENOH8Ew,2066 +fontTools/pens/reverseContourPen.py,sha256=oz64ZRhLAvT7DYMAwGKoLzZXQK8l81jRiYnTZkW6a-Y,4022 
+fontTools/pens/roundingPen.py,sha256=Q4vvG0Esq_sLNODU0TITU4F3wcXcKWo4BA7DWdDaVcM,4649 +fontTools/pens/statisticsPen.py,sha256=Hjy8SmXxRzOtkTjpvKVmfY_2WcIZ5veZYuX-t6YQ1yA,9640 +fontTools/pens/svgPathPen.py,sha256=T3b6SZS9B9sVWMK9mSFDtjHeviQs_yOJOZKq5Sg5Zdg,8572 +fontTools/pens/t2CharStringPen.py,sha256=uq9KCOxrk5TEZGYpcOG-pgkWHYCe4dMwb2hx5uYOmWA,2391 +fontTools/pens/teePen.py,sha256=P1ARJOCMJ6MxK-PB1yZ-ips3CUfnadWYnQ_do6VIasQ,1290 +fontTools/pens/transformPen.py,sha256=s0kUyQdnemUwHvYr2SFboFmh4WY1S9OHBL8L4PJKRwE,4056 +fontTools/pens/ttGlyphPen.py,sha256=yLtB-E5pTQR59OKVYySttWBu1xC2vR8ezSaRhIMtVwg,11870 +fontTools/pens/wxPen.py,sha256=W9RRHlBWHp-CVC4Exvk3ytBmRaB4-LgJPP5Bv7o9BA0,680 +fontTools/qu2cu/__init__.py,sha256=Jfm1JljXbt91w4gyvZn6jzEmVnhRx50sh2fDongrOsE,618 +fontTools/qu2cu/__main__.py,sha256=9FWf6SIZaRaC8SiL0LhjAWC2yIdY9N_9wlRko8m1l2Q,93 +fontTools/qu2cu/__pycache__/__init__.cpython-310.pyc,, +fontTools/qu2cu/__pycache__/__main__.cpython-310.pyc,, +fontTools/qu2cu/__pycache__/benchmark.cpython-310.pyc,, +fontTools/qu2cu/__pycache__/cli.cpython-310.pyc,, +fontTools/qu2cu/__pycache__/qu2cu.cpython-310.pyc,, +fontTools/qu2cu/benchmark.py,sha256=GMcr_4r7L6K9SmJ13itt-_XKhnKqSVUDPlXUG6IZmmM,1400 +fontTools/qu2cu/cli.py,sha256=U2rooYnVVEalGRAWGFHk-Kp6Okys8wtzdaWLjw1bngY,3714 +fontTools/qu2cu/qu2cu.c,sha256=C3rzZrEWjwbuKDwHOi0zl1woncn_aAgo5CZz9j1XWdI,658853 +fontTools/qu2cu/qu2cu.cpython-310-x86_64-linux-gnu.so,sha256=FDmq-FeVXwv4kml6aXIZpgfW7LBAaEecDxVn9AjBSMU,1109672 +fontTools/qu2cu/qu2cu.py,sha256=1RKhaMBBiDvo5PtkNqR5p0X2HQ4yel4TbWT8MFU6Hps,12315 +fontTools/subset/__init__.py,sha256=nXS7IZ7HsPUn90nsgYSsttw6VABp9uy5lnyEx8BH9Dc,133662 +fontTools/subset/__main__.py,sha256=bhtfP2SqP4k799pxtksFgnC-XGNQDr3LcO4lc8T5e5g,95 +fontTools/subset/__pycache__/__init__.cpython-310.pyc,, +fontTools/subset/__pycache__/__main__.cpython-310.pyc,, +fontTools/subset/__pycache__/cff.cpython-310.pyc,, +fontTools/subset/__pycache__/svg.cpython-310.pyc,, 
+fontTools/subset/__pycache__/util.cpython-310.pyc,, +fontTools/subset/cff.py,sha256=rqMRJOlX5FacV1LW8aDlVOglgEM87TkMA9bdsYenask,6145 +fontTools/subset/svg.py,sha256=8dLBzQlnIt4_fOKEFDAVlKTucdHvcbCcyG9-a6UBZZ0,9384 +fontTools/subset/util.py,sha256=9SXFYb5Ef9Z58uXmYPCQil8B2i3Q7aFB_1fFDFSppdU,754 +fontTools/svgLib/__init__.py,sha256=IGCLwSbU8jLhq6HI2vSdPQgNs6zDUi5774TgX5MCXPY,75 +fontTools/svgLib/__pycache__/__init__.cpython-310.pyc,, +fontTools/svgLib/path/__init__.py,sha256=C82fh7xH6ZHsSFVnV848-xeDezpokx1EwTmayJCouFU,1996 +fontTools/svgLib/path/__pycache__/__init__.cpython-310.pyc,, +fontTools/svgLib/path/__pycache__/arc.cpython-310.pyc,, +fontTools/svgLib/path/__pycache__/parser.cpython-310.pyc,, +fontTools/svgLib/path/__pycache__/shapes.cpython-310.pyc,, +fontTools/svgLib/path/arc.py,sha256=-f5Ym6q4tDWQ76sMNSTUTWgL_7AfgXojvBhtBS7bWwQ,5812 +fontTools/svgLib/path/parser.py,sha256=8T6okMstvgM9ufb2zBcwSzsuuoYbqfnUjNYgb6kjznU,10788 +fontTools/svgLib/path/shapes.py,sha256=xvBUIckKyT9JLy7q_ZP50r6TjvZANyHdZP7wFDzErcI,5322 +fontTools/t1Lib/__init__.py,sha256=p42y70wEIbuX0IIxZG7-b_I-gHto1VLy0gLsDvxCfkw,20865 +fontTools/t1Lib/__pycache__/__init__.cpython-310.pyc,, +fontTools/tfmLib.py,sha256=UMbkM73JXRJVS9t2B-BJc13rSjImaWBuzCoehLwHFhs,14270 +fontTools/ttLib/__init__.py,sha256=fjOFcwbRed9b_giTgJ7FLsqeJC8ndnx327WfJztW-Tc,553 +fontTools/ttLib/__main__.py,sha256=3yxwadpQ5YTM27RXqG3sFE3EaOSFLQVHaUUH9P0qrSw,3443 +fontTools/ttLib/__pycache__/__init__.cpython-310.pyc,, +fontTools/ttLib/__pycache__/__main__.cpython-310.pyc,, +fontTools/ttLib/__pycache__/macUtils.cpython-310.pyc,, +fontTools/ttLib/__pycache__/removeOverlaps.cpython-310.pyc,, +fontTools/ttLib/__pycache__/reorderGlyphs.cpython-310.pyc,, +fontTools/ttLib/__pycache__/scaleUpem.cpython-310.pyc,, +fontTools/ttLib/__pycache__/sfnt.cpython-310.pyc,, +fontTools/ttLib/__pycache__/standardGlyphOrder.cpython-310.pyc,, +fontTools/ttLib/__pycache__/ttCollection.cpython-310.pyc,, 
+fontTools/ttLib/__pycache__/ttFont.cpython-310.pyc,, +fontTools/ttLib/__pycache__/ttGlyphSet.cpython-310.pyc,, +fontTools/ttLib/__pycache__/ttVisitor.cpython-310.pyc,, +fontTools/ttLib/__pycache__/woff2.cpython-310.pyc,, +fontTools/ttLib/macUtils.py,sha256=lj3oeFpyjV7ko_JqnluneITmAtlc119J-vwTTg2s73A,1737 +fontTools/ttLib/removeOverlaps.py,sha256=-jUIyAAukmUaPaxgCCtvnjwAqL95-zRPJ9MBnmGDz30,12463 +fontTools/ttLib/reorderGlyphs.py,sha256=y4UAVABTMykRWIF9_BJP1B8X4JRLde5GzIOkAafofE8,10011 +fontTools/ttLib/scaleUpem.py,sha256=U_-NGkwfS9GRIackdEXjGYZ-wSomcUPXQahDneLeArI,14618 +fontTools/ttLib/sfnt.py,sha256=rkznKfteU_Rn9P65WSjFaiwQgpEAoh-TrQpvkQhdIlo,22832 +fontTools/ttLib/standardGlyphOrder.py,sha256=7AY_fVWdtwZ4iv5uWdyKAUcbEQiSDt1lN4sqx9xXwE0,5785 +fontTools/ttLib/tables/B_A_S_E_.py,sha256=fotjQyGgXXMrLeWH-eu_R-OJ_ZepQ3GHOzQ3auhZ82Y,88 +fontTools/ttLib/tables/BitmapGlyphMetrics.py,sha256=9gcGPVzsxEYnVBO7YLWfeOuht9PaCl09GmbAqDYqKi0,1769 +fontTools/ttLib/tables/C_B_D_T_.py,sha256=cmxOO93VXhtS_nS6-iG9K2UUKHqTTEiFThV2wPMi0vA,3331 +fontTools/ttLib/tables/C_B_L_C_.py,sha256=2Qr_xPnZn6yKMgWU5LzKfPyOu-dUK7q6XtyKAOOJl-0,188 +fontTools/ttLib/tables/C_F_F_.py,sha256=jFX4ClhxD57IxfYDkDDCq2oJqSdbgAp1ghNQw5AYU7M,1443 +fontTools/ttLib/tables/C_F_F__2.py,sha256=TTX4_bKYGmFGt2lihlFfKw8LLc-wIr6uE2P45Rv4qW0,425 +fontTools/ttLib/tables/C_O_L_R_.py,sha256=qmexaOF-RtKSzHmekBPQIOa4Q2bmFMV3X_ytaCZhwhc,5725 +fontTools/ttLib/tables/C_P_A_L_.py,sha256=4bXVL-qFKQaQhW_llYQzXZQClL24aJkEy0ms0-Bh2gk,11631 +fontTools/ttLib/tables/D_S_I_G_.py,sha256=U5OCCI0sjhK5HvhNKaEonD0wucXzHXdfz5l3sb4CB8U,5327 +fontTools/ttLib/tables/D__e_b_g.py,sha256=vROIV3UTxbK9eN3rmHOu1ARwBiOXL6K5ihmq0QMToJQ,443 +fontTools/ttLib/tables/DefaultTable.py,sha256=cOtgkLWPY9qmOH2BSPt4c4IUSdANWTKx2rK1CTxQ4h0,1487 +fontTools/ttLib/tables/E_B_D_T_.py,sha256=8iakmy4PP8BNiem9ZT_P7ysu8BkV1gWFJD94K5ThVSo,32276 +fontTools/ttLib/tables/E_B_L_C_.py,sha256=yzlTk7EViBdSqw_8BzElEPZt7NsREH1nVobEBTlm6bg,29779 
+fontTools/ttLib/tables/F_F_T_M_.py,sha256=aq9FsyfMegjxRsAWF8U2a3OpxFCPHJjNiLlC63dmqnI,1354 +fontTools/ttLib/tables/F__e_a_t.py,sha256=x3ryfFJPsGVWqy10a4ulXADBnsB2JEVpyx_DuWYqy8k,5380 +fontTools/ttLib/tables/G_D_E_F_.py,sha256=xN2hcW8GPMOos7dTpXJSWNJxUbGzUrnQ_2i-vxlNT_E,88 +fontTools/ttLib/tables/G_M_A_P_.py,sha256=S0KyulRo88aZ4YM8OJ_l8Mf0husmlI03IlXP6aa1C1w,4515 +fontTools/ttLib/tables/G_P_K_G_.py,sha256=XbfsF-qCk9ortdZycw7r6DEo94lfg6TTb3fN7HPYCuM,4441 +fontTools/ttLib/tables/G_P_O_S_.py,sha256=nVSjCI8k7-8aIkzIMc7bCmd2aHeVvjwPIh2jhwn9KY4,88 +fontTools/ttLib/tables/G_S_U_B_.py,sha256=-e_9Jxihz6AUSzSBCdW3tycdu4QZUsL8hZI6A7lMt9Q,88 +fontTools/ttLib/tables/G__l_a_t.py,sha256=rWcOEnv9GmNIvJu7y-cpnrAUkc82527LroBIYA7NQTI,8568 +fontTools/ttLib/tables/G__l_o_c.py,sha256=_MFYx8IUuJseNrS65QN-P8oq4CcGZnSxdGXKyv92Kco,2598 +fontTools/ttLib/tables/H_V_A_R_.py,sha256=bdU_ktJJ2-MQ_zFn1wWTtGpZar7OTFeOEnXyrzDhts8,88 +fontTools/ttLib/tables/J_S_T_F_.py,sha256=d36nOt42I5EY-7JDOulBHKtv1StpxxuvLU7gSOC6OGw,88 +fontTools/ttLib/tables/L_T_S_H_.py,sha256=DG559txp9zRwe5xlhhq8_HqkOvKrgbWUBw-11nKtw-o,1826 +fontTools/ttLib/tables/M_A_T_H_.py,sha256=zXSUNz98761iTREcge-YQ4LcEGCFhp1VVWAZt8B4TTQ,88 +fontTools/ttLib/tables/M_E_T_A_.py,sha256=0IZysRvZur6rhe4DP7P2JnKW0O9SgbxLBHBmAJMx5vA,11784 +fontTools/ttLib/tables/M_V_A_R_.py,sha256=uMresSbbzC43VL8Lou2bHjNmN3aY8wxxrV3qa6SSmR4,88 +fontTools/ttLib/tables/O_S_2f_2.py,sha256=4TN66vch-0lJnr-f-ErbfWbxuDF_JRTOt-qy84oDG2k,27752 +fontTools/ttLib/tables/S_I_N_G_.py,sha256=73zv425wym8w3MndveArHsp1TzM6VOQAz1gvwB9GgoQ,3112 +fontTools/ttLib/tables/S_T_A_T_.py,sha256=tPbD_6x4aJACOux8bKe_sFlk0PEat7aiZn8pnXoUGws,88 +fontTools/ttLib/tables/S_V_G_.py,sha256=8h8arIl9gedLB3GRRNF8V0x2pq1GikF7If9e_srB69I,7463 +fontTools/ttLib/tables/S__i_l_f.py,sha256=5hZ1ze12-tRyYIu-hEewRlgMWiuGHNf40om7Rs369_Q,34901 +fontTools/ttLib/tables/S__i_l_l.py,sha256=KvjK_vrh_YyPHtYwLyrHLx33gcTYg5lBnvUYie6b06M,3104 
+fontTools/ttLib/tables/T_S_I_B_.py,sha256=CMcquVV86ug63Zk_yTB37DKqO91FZW14WtzwBI2aPjY,86 +fontTools/ttLib/tables/T_S_I_C_.py,sha256=TjDKgGdFEaL4Affo9MTInuVKbYUHMa0pJX18pzgYxT0,88 +fontTools/ttLib/tables/T_S_I_D_.py,sha256=OP_tHge02Fs7Y5lnVrgUGfr4FdIu-iv3GVtMEyH3Nrw,86 +fontTools/ttLib/tables/T_S_I_J_.py,sha256=soJ3cf52aXLQTqvhQV2bHzyRSh6bsxxvZcpAV4Z9tlc,86 +fontTools/ttLib/tables/T_S_I_P_.py,sha256=SvDvtRhxiC96WvZxNb2RoyTf0IXjeVMF_UP42ZD_vwU,86 +fontTools/ttLib/tables/T_S_I_S_.py,sha256=IHJsyWONSgbg9hm5VnkCeq70SQcwnNJZZO_dBtJGZFc,86 +fontTools/ttLib/tables/T_S_I_V_.py,sha256=Pqr8g0zrgCZl2sSJlxE5AYXazlZE29o1BO8oMVblBUs,655 +fontTools/ttLib/tables/T_S_I__0.py,sha256=c0F4nKBKTeURqxCFv3nwxCu9Dl0mh7wr0PhOrLKMjho,2043 +fontTools/ttLib/tables/T_S_I__1.py,sha256=N-BoLR5WWZv8tglokn5WZv8w_52jzKDG8jiZn5bS__k,6982 +fontTools/ttLib/tables/T_S_I__2.py,sha256=ZV39h3SKtVSxKF9dKkI4sC0X5oXLkQDSPCcOeBTxUTM,420 +fontTools/ttLib/tables/T_S_I__3.py,sha256=wQnwccPX3IaxGjzCdJHwtLh2ZqSsoAS-vWjhdI2h5dQ,467 +fontTools/ttLib/tables/T_S_I__5.py,sha256=jB-P8RMFC3KOGdtTQH5uzvqEJDIWhRlDFsuvAix0cl0,1510 +fontTools/ttLib/tables/T_T_F_A_.py,sha256=7wiKnyzrHiLgdtz6klG02flh8S7hm7GKarif7lw3IMc,81 +fontTools/ttLib/tables/TupleVariation.py,sha256=RBHDqKkdR-MQtN_pWRsVpBax4jzYcDIhA8zXSfJh4ZQ,30912 +fontTools/ttLib/tables/V_A_R_C_.py,sha256=KOtRqzdvsBXvl9vkUieGVROvIu0mTXuAXLXizNeSDWY,88 +fontTools/ttLib/tables/V_D_M_X_.py,sha256=dqE3G2Hg4ByQNteceOMctgFu2Er_DHh4_vOlAAaP5nM,10189 +fontTools/ttLib/tables/V_O_R_G_.py,sha256=XasThyPjPNah6Yn0TCFVv9H5kmYDx5FIMaH8B9sA2oU,5762 +fontTools/ttLib/tables/V_V_A_R_.py,sha256=X9C_r2HiSnI2mYqUQ93yK4zLpweRzobJ0Kh1J2lTsAw,88 +fontTools/ttLib/tables/__init__.py,sha256=iZ5iQZNhlH9M12ovQDu8EcnFwCe1ShoTvxEh22hGOoc,2624 +fontTools/ttLib/tables/__pycache__/B_A_S_E_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/BitmapGlyphMetrics.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/C_B_D_T_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/C_B_L_C_.cpython-310.pyc,, 
+fontTools/ttLib/tables/__pycache__/C_F_F_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/C_F_F__2.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/C_O_L_R_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/C_P_A_L_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/D_S_I_G_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/D__e_b_g.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/DefaultTable.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/E_B_D_T_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/E_B_L_C_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/F_F_T_M_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/F__e_a_t.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/G_D_E_F_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/G_M_A_P_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/G_P_K_G_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/G_P_O_S_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/G_S_U_B_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/G__l_a_t.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/G__l_o_c.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/H_V_A_R_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/J_S_T_F_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/L_T_S_H_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/M_A_T_H_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/M_E_T_A_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/M_V_A_R_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/O_S_2f_2.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/S_I_N_G_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/S_T_A_T_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/S_V_G_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/S__i_l_f.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/S__i_l_l.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I_B_.cpython-310.pyc,, 
+fontTools/ttLib/tables/__pycache__/T_S_I_C_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I_D_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I_J_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I_P_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I_S_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I_V_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I__0.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I__1.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I__2.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I__3.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_S_I__5.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/T_T_F_A_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/TupleVariation.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/V_A_R_C_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/V_D_M_X_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/V_O_R_G_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/V_V_A_R_.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/__init__.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_a_n_k_r.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_a_v_a_r.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_b_s_l_n.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_c_i_d_g.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_c_m_a_p.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_c_v_a_r.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_c_v_t.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_f_e_a_t.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_f_p_g_m.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_f_v_a_r.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_g_a_s_p.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_g_c_i_d.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_g_l_y_f.cpython-310.pyc,, 
+fontTools/ttLib/tables/__pycache__/_g_v_a_r.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_h_d_m_x.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_h_e_a_d.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_h_h_e_a.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_h_m_t_x.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_k_e_r_n.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_l_c_a_r.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_l_o_c_a.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_l_t_a_g.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_m_a_x_p.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_m_e_t_a.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_m_o_r_t.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_m_o_r_x.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_n_a_m_e.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_o_p_b_d.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_p_o_s_t.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_p_r_e_p.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_p_r_o_p.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_s_b_i_x.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_t_r_a_k.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_v_h_e_a.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/_v_m_t_x.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/asciiTable.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/grUtils.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/otBase.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/otConverters.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/otData.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/otTables.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/otTraverse.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/sbixGlyph.cpython-310.pyc,, +fontTools/ttLib/tables/__pycache__/sbixStrike.cpython-310.pyc,, 
+fontTools/ttLib/tables/__pycache__/ttProgram.cpython-310.pyc,, +fontTools/ttLib/tables/_a_n_k_r.py,sha256=DhIUAWnvXZZdC1jlh9ubcsobFahdtlJMsk7v_2s-WaM,462 +fontTools/ttLib/tables/_a_v_a_r.py,sha256=QqLK59G2UUYGZMjpTjhw9l01dcspANRydvnBwB8GnE0,7038 +fontTools/ttLib/tables/_b_s_l_n.py,sha256=D1tRo8TDAUxeCqVWsTma9u2VxRzxUkCpF84Lv_hy4rU,170 +fontTools/ttLib/tables/_c_i_d_g.py,sha256=A6llfYvsJQl0Mj6fnrRxUGXUlBkyEowo1J2euUulHM4,787 +fontTools/ttLib/tables/_c_m_a_p.py,sha256=OP0WuHxErqVIDEuGnJ20lel04jd9JeAYIYTENqKK--Y,61643 +fontTools/ttLib/tables/_c_v_a_r.py,sha256=Nlf8etrchBixD7qxFgxuDZ51VHA0XtsHfABDSgPG2RU,3307 +fontTools/ttLib/tables/_c_v_t.py,sha256=E_mDVniDspGjbBQk9CDEm8y3LJ5FbnHxZHRGbq-okHA,1361 +fontTools/ttLib/tables/_f_e_a_t.py,sha256=cshl7jgxj2RgzE8kECCkQVAW2ibJqgKLpZdT1PwyvuM,560 +fontTools/ttLib/tables/_f_p_g_m.py,sha256=-a5WYucI482KQ65rmbl8YwsD4q9BRyDIunJ_9MYAeyc,1170 +fontTools/ttLib/tables/_f_v_a_r.py,sha256=QJ90oDYxZv3o0u9piylqGGKyk-1ZGqt0vcsHtTfGwYY,8591 +fontTools/ttLib/tables/_g_a_s_p.py,sha256=Sp31uXdZyQO2Bbp4Qh5QBu75TvnDmxNQYhfMXf6PkCg,1916 +fontTools/ttLib/tables/_g_c_i_d.py,sha256=4VWq2u6c21ZOQ5_EJ5EwtZXC-zDz6SOPYwDDRZWRczA,170 +fontTools/ttLib/tables/_g_l_y_f.py,sha256=Jl3i3QPZw6AnSMQXCFpU9Gh9IbIEmDOOcQK_u71qO98,82196 +fontTools/ttLib/tables/_g_v_a_r.py,sha256=bVeiEmFZHdGvcaaZx3_shV_giz0FD1cnPNcLC8QZRU4,10191 +fontTools/ttLib/tables/_h_d_m_x.py,sha256=BOadCwbQhtiwQZoduvkvt6rtevP7BQiyd5KYnfjE0Cc,4024 +fontTools/ttLib/tables/_h_e_a_d.py,sha256=cWH7gPQdb7SoWH88eyHHv0HeJ-k7xyXWjorPVTMIMGs,4745 +fontTools/ttLib/tables/_h_h_e_a.py,sha256=YSMaTvNp3CD4G6WgGLmYdJGv_TKghKkT-IHW5Gw0iio,4434 +fontTools/ttLib/tables/_h_m_t_x.py,sha256=DEcruWWtBYNW6sHtuv17snMCUYkvdaVtx_lrZLLhBfc,5767 +fontTools/ttLib/tables/_k_e_r_n.py,sha256=SXkBnwz39gd6YHrQizGqz1orFEETp02vLgxzJSCNdYQ,10437 +fontTools/ttLib/tables/_l_c_a_r.py,sha256=SKmQ65spClbLnsYMDoqecsUOWWNyBDsFWut-Y6ahVhk,88 +fontTools/ttLib/tables/_l_o_c_a.py,sha256=JkfJoEMtrWPRPuTsxbARrvVJzJWMQv42NZ816KMOra8,1917 
+fontTools/ttLib/tables/_l_t_a_g.py,sha256=L1ekoPzh4pMdWGRr-cdjL3M2asf4CqeUHq7zh4wvwrw,2274 +fontTools/ttLib/tables/_m_a_x_p.py,sha256=_aoIWOx9c6Sj-5OtBb7qu0dfARQEcOkV7VGxlnGmiDc,5061 +fontTools/ttLib/tables/_m_e_t_a.py,sha256=MslEJ7E0oO-JNHyAhtkRsBCBp0kK4OXfAgRqtRF9GDA,3651 +fontTools/ttLib/tables/_m_o_r_t.py,sha256=2p7PzPGzdOtFhg-Fxvdh0PO4yRs6_z_WjQegexeZCsw,170 +fontTools/ttLib/tables/_m_o_r_x.py,sha256=UJhBbA3mgVQO1oGmu_2bNXUwQreVSztG85F9k7DpmiQ,170 +fontTools/ttLib/tables/_n_a_m_e.py,sha256=geoF-ka_1h43vuuAF7QThLei_mlEESkrIuAI4tCfKGY,41030 +fontTools/ttLib/tables/_o_p_b_d.py,sha256=t3eqUkZPyaQbahEmKaqp7brDNbt4MQje2Vq1jBu-fEc,170 +fontTools/ttLib/tables/_p_o_s_t.py,sha256=DusC5HkI4eJw9jw9idb0GA1Xr9YuhQMnmsz4GM36kVI,11284 +fontTools/ttLib/tables/_p_r_e_p.py,sha256=97rDk0OiGoOD-foAIzqzYM1IKhB4gQuWyBrkH1PVvP0,115 +fontTools/ttLib/tables/_p_r_o_p.py,sha256=3JHFloIJwg9n4dzoe4KLobHc75oJh6DLNe51sakfz8E,170 +fontTools/ttLib/tables/_s_b_i_x.py,sha256=eHzNG4I8732aeW7iUNEEdYsxgsHT9sTtbaD2vvAxxR8,4443 +fontTools/ttLib/tables/_t_r_a_k.py,sha256=fZV1pQrAilSNc0Yd3x0XoIGbqlNoDv67LB2gb_CejMo,11069 +fontTools/ttLib/tables/_v_h_e_a.py,sha256=zHokAcH7CQ4tZPQAGmdTuv0_X-FHwyLWea1f9aFb1Gg,4130 +fontTools/ttLib/tables/_v_m_t_x.py,sha256=oUrskRNAf3FLIZaYLuk03np_IsIWBGUWbMFcdjU3Sys,229 +fontTools/ttLib/tables/asciiTable.py,sha256=4c69jsAirUnDEpylf9CYBoCKTzwbmfbtUAOrtPnpHjY,637 +fontTools/ttLib/tables/grUtils.py,sha256=hcOJ5oJPOd2uJWnWA7qwR7AfL37YZ5zUT7g8o5BBV80,2270 +fontTools/ttLib/tables/otBase.py,sha256=Vq8fv4lm-e3oPCuSPcWTNMQI2KmX0iOIz_lO9kE1mKE,53355 +fontTools/ttLib/tables/otConverters.py,sha256=YburADbtHu1kVg5v5eHl2CRrUOQCNCycUoWgvbteAsk,74054 +fontTools/ttLib/tables/otData.py,sha256=esZs8p10aaJjioueGZ5plMou2LnzhJeuD-q1AOA-Kek,197260 +fontTools/ttLib/tables/otTables.py,sha256=aCWou5-h4uhH2nPM2jwyD6OfoPhtnsOXm_ZefAawp4I,96937 +fontTools/ttLib/tables/otTraverse.py,sha256=oTr7nA7u7kEltLAhl4Kfl1RPD8O2_bKaoXa5l0hkRVA,5497 
+fontTools/ttLib/tables/sbixGlyph.py,sha256=tjEUPVRfx6gr5yme8UytGTtVrimKN5qmbzT1GZPjXiM,5796 +fontTools/ttLib/tables/sbixStrike.py,sha256=gFyOlhRIGnd59y0SrhtsT2Ce4L3yaBrLoFJ_dK9u9mQ,6663 +fontTools/ttLib/tables/table_API_readme.txt,sha256=eZlRTLUkLzc_9Ot3pdfhyMb3ahU0_Iipx0vSbzOVGy8,2748 +fontTools/ttLib/tables/ttProgram.py,sha256=tgtxgd-EnOq-2PUlYEihp-6NHu_7HnE5rxeSAtmXOtU,35888 +fontTools/ttLib/ttCollection.py,sha256=aRph2MkBK3kd9-JCLqhJ1EN9pffN_lVX6WWmOTTewc8,3963 +fontTools/ttLib/ttFont.py,sha256=UXPMV4c5pctOWNygu2F6_kR6FFE9zWLLOGFjh9282WU,40976 +fontTools/ttLib/ttGlyphSet.py,sha256=1SAEMFLuzm5KSjjtXG23c--ihPnIvy0Lq37cHMV73Oc,17376 +fontTools/ttLib/ttVisitor.py,sha256=_tah4C42Tv6Pm9QeLNQwwVCxqI4VNEAqYCbmThp6cvY,1025 +fontTools/ttLib/woff2.py,sha256=Ryw4WVwUFMtdEo9FcIejP1OTV92Z4B9y5Wq7nWDW3lE,61058 +fontTools/ttx.py,sha256=XCerBn2ySMc5Bn54io4j5U5cW228GFREYvEeuvp0ZfM,16652 +fontTools/ufoLib/__init__.py,sha256=eGn4PHQc1PlY1VRuKj3WLHoT_XGkKjrnf99XYTLWjSI,93679 +fontTools/ufoLib/__pycache__/__init__.cpython-310.pyc,, +fontTools/ufoLib/__pycache__/converters.cpython-310.pyc,, +fontTools/ufoLib/__pycache__/errors.cpython-310.pyc,, +fontTools/ufoLib/__pycache__/etree.cpython-310.pyc,, +fontTools/ufoLib/__pycache__/filenames.cpython-310.pyc,, +fontTools/ufoLib/__pycache__/glifLib.cpython-310.pyc,, +fontTools/ufoLib/__pycache__/kerning.cpython-310.pyc,, +fontTools/ufoLib/__pycache__/plistlib.cpython-310.pyc,, +fontTools/ufoLib/__pycache__/pointPen.cpython-310.pyc,, +fontTools/ufoLib/__pycache__/utils.cpython-310.pyc,, +fontTools/ufoLib/__pycache__/validators.cpython-310.pyc,, +fontTools/ufoLib/converters.py,sha256=EjuBkQxFltzeb-qnt2jzwieJH92f9ybcdZwAvQJi_Kw,10558 +fontTools/ufoLib/errors.py,sha256=UULZ4h1i_Lb9lywjScgC6N-wC4yyPceTSin0BebbhJk,584 +fontTools/ufoLib/etree.py,sha256=YQpCsRlLv0zfZUK8_i9cNFKBvyq1Gyy6HQbKyPLCoEY,224 +fontTools/ufoLib/filenames.py,sha256=Trm8k9AzXYYaYo0VwAgLJKCtWgsA1QjBlirmgXdZhjg,7562 
+fontTools/ufoLib/glifLib.py,sha256=wpoSz624xqocPJbdzzElyCAgmEOjZVQeGr2KiZtHvAA,72053 +fontTools/ufoLib/kerning.py,sha256=0jPFd7mti884yvPjvYcU8lAWDwvVsNOObeQvVmPRJ3k,2973 +fontTools/ufoLib/plistlib.py,sha256=IpMh2FH9-6dxcvjSK4YR7L01HTIP1_RnQ8mWliyds1E,1499 +fontTools/ufoLib/pointPen.py,sha256=QGg6b_UeosZodcqqfAIPyAPUbfT7KgCxDwYfSR0GlCI,233 +fontTools/ufoLib/utils.py,sha256=8aqNHdFUd_imnawCQFY3UaXpF_s_4sHeinH0lqELTos,1893 +fontTools/ufoLib/validators.py,sha256=zIcp2weAYLOJBCvxbqBqAy34TaJrqpAlXKshJIkdhWI,30805 +fontTools/unicode.py,sha256=ZZ7OMmWvIyV1IL1k6ioTzaRAh3tUvm6gvK7QgFbOIHY,1237 +fontTools/unicodedata/Blocks.py,sha256=K72YZjkqty9zQH_UUIOa-hwblt3GrUrjXUbcOWhR0rg,32416 +fontTools/unicodedata/OTTags.py,sha256=wOPpbMsNcp_gdvPFeITtgVMnTN8TJSNAsVEdu_nuPXE,1196 +fontTools/unicodedata/ScriptExtensions.py,sha256=cm34XQGJEeSmkqSpNGLgWoScWvVXObQ-NnygIp64fMk,27713 +fontTools/unicodedata/Scripts.py,sha256=-aLU0oxjjgdVHsT9h5Ej3gMJwzxE7I7RLKOwKREIkkw,130272 +fontTools/unicodedata/__init__.py,sha256=DOCX0X9-Eo3mEju7Zjgcod7d8aswTo3vjDRzV7-8Z4g,8824 +fontTools/unicodedata/__pycache__/Blocks.cpython-310.pyc,, +fontTools/unicodedata/__pycache__/OTTags.cpython-310.pyc,, +fontTools/unicodedata/__pycache__/ScriptExtensions.cpython-310.pyc,, +fontTools/unicodedata/__pycache__/Scripts.cpython-310.pyc,, +fontTools/unicodedata/__pycache__/__init__.cpython-310.pyc,, +fontTools/varLib/__init__.py,sha256=mVDyxGfpPMKALtfnry-VgRt7fK8XdCqNWyV-b2aXgGI,53537 +fontTools/varLib/__main__.py,sha256=wbdYC5bPjWCxA0I4SKcLO88gl-UMtsYS8MxdW9ySTkY,95 +fontTools/varLib/__pycache__/__init__.cpython-310.pyc,, +fontTools/varLib/__pycache__/__main__.cpython-310.pyc,, +fontTools/varLib/__pycache__/avar.cpython-310.pyc,, +fontTools/varLib/__pycache__/avarPlanner.cpython-310.pyc,, +fontTools/varLib/__pycache__/builder.cpython-310.pyc,, +fontTools/varLib/__pycache__/cff.cpython-310.pyc,, +fontTools/varLib/__pycache__/errors.cpython-310.pyc,, +fontTools/varLib/__pycache__/featureVars.cpython-310.pyc,, 
+fontTools/varLib/__pycache__/interpolatable.cpython-310.pyc,, +fontTools/varLib/__pycache__/interpolatableHelpers.cpython-310.pyc,, +fontTools/varLib/__pycache__/interpolatablePlot.cpython-310.pyc,, +fontTools/varLib/__pycache__/interpolatableTestContourOrder.cpython-310.pyc,, +fontTools/varLib/__pycache__/interpolatableTestStartingPoint.cpython-310.pyc,, +fontTools/varLib/__pycache__/interpolate_layout.cpython-310.pyc,, +fontTools/varLib/__pycache__/iup.cpython-310.pyc,, +fontTools/varLib/__pycache__/merger.cpython-310.pyc,, +fontTools/varLib/__pycache__/models.cpython-310.pyc,, +fontTools/varLib/__pycache__/multiVarStore.cpython-310.pyc,, +fontTools/varLib/__pycache__/mutator.cpython-310.pyc,, +fontTools/varLib/__pycache__/mvar.cpython-310.pyc,, +fontTools/varLib/__pycache__/plot.cpython-310.pyc,, +fontTools/varLib/__pycache__/stat.cpython-310.pyc,, +fontTools/varLib/__pycache__/varStore.cpython-310.pyc,, +fontTools/varLib/avar.py,sha256=Ye_u0HHznaPQaTzufNFKDj_v9o_LxOKJoa_eTK1D1F0,9647 +fontTools/varLib/avarPlanner.py,sha256=uLMGsL6cBbEMq5YItwABG_vXlXV3bxquM93WGDJ1brA,27358 +fontTools/varLib/builder.py,sha256=mSKOCcnnw-WzmZs15FayoqCDh77Ts7o9Tre9psh8CUc,6609 +fontTools/varLib/cff.py,sha256=EVgaQcoROIrYQsRuftnxFuGGldEPYbrIh5yBckylJC4,22901 +fontTools/varLib/errors.py,sha256=dMo8eGj76I7H4hrBEiNbYrGs2J1K1SwdsUyTHpkVOrQ,6934 +fontTools/varLib/featureVars.py,sha256=BCOBGjGUv2Rw_z0rlVi1ZYkTDcCMh0LyAUzDVJ2PYm4,25448 +fontTools/varLib/instancer/__init__.py,sha256=wFqRVbww2CjuJk3MPDQ2HGmpNBGNQd9JF58KQoBl8_c,71346 +fontTools/varLib/instancer/__main__.py,sha256=zfULwcP01FhplS1IlcMgNQnLxk5RVfmOuinWjqeid-g,104 +fontTools/varLib/instancer/__pycache__/__init__.cpython-310.pyc,, +fontTools/varLib/instancer/__pycache__/__main__.cpython-310.pyc,, +fontTools/varLib/instancer/__pycache__/featureVars.cpython-310.pyc,, +fontTools/varLib/instancer/__pycache__/names.cpython-310.pyc,, +fontTools/varLib/instancer/__pycache__/solver.cpython-310.pyc,, 
+fontTools/varLib/instancer/featureVars.py,sha256=oPqSlnHLMDTtOsmQMi6gkzLox7ymCrqlRAkvC_EJ4bc,7110 +fontTools/varLib/instancer/names.py,sha256=IPRqel_M8zVU0jl30WsfgufxUm9PBBQDQCY3VHapeHc,14950 +fontTools/varLib/instancer/solver.py,sha256=uMePwX0BVT5F94kUvDglsI4_F0nEH67F7RFuJ6tQwQ0,11002 +fontTools/varLib/interpolatable.py,sha256=4PL6mVkZ7lZUbkcaVZTNQx_lyWF92-Hh3NfgcbAvJ94,42756 +fontTools/varLib/interpolatableHelpers.py,sha256=lXd7kwfIVl-4opd-vxCDhf48RnJ7IQKv_uuFQM_6vaU,11496 +fontTools/varLib/interpolatablePlot.py,sha256=w393P6mGLRhYkIjSxMww3qyoYxAUZzCXlmPBbI_84C0,44375 +fontTools/varLib/interpolatableTestContourOrder.py,sha256=EmJ2jp4sHuSM5P-seYvOLk0HLdWyPOHeVWRKIGIKXx4,3033 +fontTools/varLib/interpolatableTestStartingPoint.py,sha256=K6OYKBspim6BXc91pfLTbGLyi5XZukfMuBc6hRpENG8,4296 +fontTools/varLib/interpolate_layout.py,sha256=22VjGZuV2YiAe2MpdTf0xPVz1x2G84bcOL0vOeBpGQM,3689 +fontTools/varLib/iup.c,sha256=w2M7V2o38Z7WaeDk2jFjNO_pD9aWdxZ6kZz3wD8OJ10,779751 +fontTools/varLib/iup.cpython-310-x86_64-linux-gnu.so,sha256=a2dhPc8Yu8GOzeOW87UNGBLCfkxXvGFqTCdbF9w6ETs,1514048 +fontTools/varLib/iup.py,sha256=bUk3O1QoFM8k_QEleHruT0biPoauX8AUJorbRuO21Vo,14675 +fontTools/varLib/merger.py,sha256=E59oli4AwqWZ-FgnuStMSBvsB-FHe-55esXTYUqGeJ8,60802 +fontTools/varLib/models.py,sha256=sj_ENljh_qcMbfYzRIOlRgHq6tFOmL02Wv6WO8uofis,22398 +fontTools/varLib/multiVarStore.py,sha256=GY3cLa6Z0CopuMuTrU25Hv4hlBJMkvjbDvzjbqbTNDM,8320 +fontTools/varLib/mutator.py,sha256=S624yKhtFSm-uBFji2W65QdT4db4pfUNZyMw4pigzv0,19236 +fontTools/varLib/mvar.py,sha256=LTV77vH_3Ecg_qKBO5xQzjLOlJir_ppEr7mPVZRgad8,2449 +fontTools/varLib/plot.py,sha256=NoSZkJ5ndxNcDvJIvd5pQ9_jX6X1oM1K2G_tR4sdPVs,7494 +fontTools/varLib/stat.py,sha256=pNtU3Jebm8Gr5umrbF5xGj5yJQciFwSFpfePOcg37xY,4535 +fontTools/varLib/varStore.py,sha256=RrBoEmNWCcsaL7CFZnzrcl26URVekUqTN4qoOy81eVQ,25160 +fontTools/voltLib/__init__.py,sha256=ZZ1AsTx1VlDn40Kupce-fM3meOWugy3RZraBW9LG-9M,151 +fontTools/voltLib/__pycache__/__init__.cpython-310.pyc,, 
+fontTools/voltLib/__pycache__/ast.cpython-310.pyc,, +fontTools/voltLib/__pycache__/error.cpython-310.pyc,, +fontTools/voltLib/__pycache__/lexer.cpython-310.pyc,, +fontTools/voltLib/__pycache__/parser.cpython-310.pyc,, +fontTools/voltLib/__pycache__/voltToFea.cpython-310.pyc,, +fontTools/voltLib/ast.py,sha256=sioOeSazmC8PxRMRql33I64JaCflu55UUZcikm9mwIY,13226 +fontTools/voltLib/error.py,sha256=phcQOQj-xOspCXu9hBJQRhSOBDzxHRgZd3fWQOFNJzw,395 +fontTools/voltLib/lexer.py,sha256=OvuETOSvlS6v7iCVeJ3IdH2Cg71n3OJoEyiB3-h6vhE,3368 +fontTools/voltLib/parser.py,sha256=wBSUrjLT3fSPv9Mjx6_ULIf8IcGlwjtb4Auxjh5wqnc,24916 +fontTools/voltLib/voltToFea.py,sha256=igP7_E-7AzSl8f_LiN_GHMoNmFiXPBaXp_zZLndjU4c,28505 +fonttools-4.54.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +fonttools-4.54.1.dist-info/LICENSE,sha256=Z4cgj4P2Wcy8IiOy_elS_6b36KymLxqKK_W8UbsbI4M,1072 +fonttools-4.54.1.dist-info/METADATA,sha256=FPeXGmgCDbfkFwwdPYM2c6PEM4Cm0PNA19Blj4-lw6c,163697 +fonttools-4.54.1.dist-info/RECORD,, +fonttools-4.54.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fonttools-4.54.1.dist-info/WHEEL,sha256=VXRyidHovicsPXAYYBPK-lnsPgFrrhXkyzySBEhHzcg,151 +fonttools-4.54.1.dist-info/entry_points.txt,sha256=8kVHddxfFWA44FSD4mBpmC-4uCynQnkoz_9aNJb227Y,147 +fonttools-4.54.1.dist-info/top_level.txt,sha256=rRgRylrXzekqWOsrhygzib12pQ7WILf7UGjqEwkIFDM,10 diff --git a/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/WHEEL b/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..0b1249e7ab4978615ea2e05101e51c0779008f78 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.1.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git 
a/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/top_level.txt b/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..9af65ba39d292309497df4accdc44bd6f8143d10 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/fonttools-4.54.1.dist-info/top_level.txt @@ -0,0 +1 @@ +fontTools diff --git a/parrot/lib/python3.10/site-packages/h11/__init__.py b/parrot/lib/python3.10/site-packages/h11/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..989e92c3458681a6f0be72ae4105ea742750d328 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/__init__.py @@ -0,0 +1,62 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. 
+ +from h11._connection import Connection, NEED_DATA, PAUSED +from h11._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from h11._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from h11._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ( + "Connection", + "NEED_DATA", + "PAUSED", + "ConnectionClosed", + "Data", + "EndOfMessage", + "Event", + "InformationalResponse", + "Request", + "Response", + "CLIENT", + "CLOSED", + "DONE", + "ERROR", + "IDLE", + "MUST_CLOSE", + "SEND_BODY", + "SEND_RESPONSE", + "SERVER", + "SWITCHED_PROTOCOL", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", +) diff --git a/parrot/lib/python3.10/site-packages/h11/_abnf.py b/parrot/lib/python3.10/site-packages/h11/_abnf.py new file mode 100644 index 0000000000000000000000000000000000000000..933587fba22290d7eb7df4c88e12f1e61702b8ce --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/_abnf.py @@ -0,0 +1,132 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." 
/ "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields +# field-name = token +field_name = token + +# The standard says: +# +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 +# +# https://tools.ietf.org/html/rfc5234#appendix-B.1 +# +# VCHAR = %x21-7E +# ; visible (printing) characters +# +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string +# obs-text = %x80-FF +# +# However, the standard definition of field-content is WRONG! It disallows +# fields containing a single visible character surrounded by whitespace, +# e.g. "foo a bar". +# +# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 +# +# So our definition of field_content attempts to fix it up... +# +# Also, we allow lots of control characters, because apparently people assume +# that they're legal in practice (e.g., google analytics makes cookies with +# \x01 in them!): +# https://github.com/python-hyper/h11/issues/57 +# We still don't allow NUL or whitespace, because those are often treated as +# meta-characters and letting them through can lead to nasty issues like SSRF. +vchar = r"[\x21-\x7e]" +vchar_or_obs_text = r"[^\x00\s]" +field_vchar = vchar_or_obs_text +field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) + +# We handle obs-fold at a different level, and our fixed-up field_content +# already grows to swallow the whole value, so ? 
instead of * +field_value = r"({field_content})?".format(**globals()) + +# header-field = field-name ":" OWS field-value OWS +header_field = ( + r"(?P{field_name})" + r":" + r"{OWS}" + r"(?P{field_value})" + r"{OWS}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line +# +# request-line = method SP request-target SP HTTP-version CRLF +# method = token +# HTTP-version = HTTP-name "/" DIGIT "." DIGIT +# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive +# +# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full +# URL, host+port (for connect), or even "*", but in any case we are guaranteed +# that it contists of the visible printing characters. +method = token +request_target = r"{vchar}+".format(**globals()) +http_version = r"HTTP/(?P[0-9]\.[0-9])" +request_line = ( + r"(?P{method})" + r" " + r"(?P{request_target})" + r" " + r"{http_version}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line +# +# status-line = HTTP-version SP status-code SP reason-phrase CRLF +# status-code = 3DIGIT +# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) +status_code = r"[0-9]{3}" +reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) +status_line = ( + r"{http_version}" + r" " + r"(?P{status_code})" + # However, there are apparently a few too many servers out there that just + # leave out the reason phrase: + # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 + # https://github.com/seanmonstar/httparse/issues/29 + # so make it optional. ?: is a non-capturing group. + r"(?: (?P{reason_phrase}))?".format(**globals()) +) + +HEXDIG = r"[0-9A-Fa-f]" +# Actually +# +# chunk-size = 1*HEXDIG +# +# but we impose an upper-limit to avoid ridiculosity. 
len(str(2**64)) == 20 +chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) +# Actually +# +# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) +# +# but we aren't parsing the things so we don't really care. +chunk_ext = r";.*" +chunk_header = ( + r"(?P{chunk_size})" + r"(?P{chunk_ext})?" + r"{OWS}\r\n".format( + **globals() + ) # Even though the specification does not allow for extra whitespaces, + # we are lenient with trailing whitespaces because some servers on the wild use it. +) diff --git a/parrot/lib/python3.10/site-packages/h11/_connection.py b/parrot/lib/python3.10/site-packages/h11/_connection.py new file mode 100644 index 0000000000000000000000000000000000000000..d1752707598154d190d69b2c26f3098b74656652 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/_connection.py @@ -0,0 +1,633 @@ +# This contains the main Connection class. Everything in h11 revolves around +# this. +from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union + +from ._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from ._headers import get_comma_header, has_expect_100_continue, set_comma_header +from ._readers import READERS, ReadersType +from ._receivebuffer import ReceiveBuffer +from ._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + ConnectionState, + DONE, + ERROR, + MIGHT_SWITCH_PROTOCOL, + SEND_BODY, + SERVER, + SWITCHED_PROTOCOL, +) +from ._util import ( # Import the internal things we need + LocalProtocolError, + RemoteProtocolError, + Sentinel, +) +from ._writers import WRITERS, WritersType + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = ["Connection", "NEED_DATA", "PAUSED"] + + +class NEED_DATA(Sentinel, metaclass=Sentinel): + pass + + +class PAUSED(Sentinel, metaclass=Sentinel): + pass + + +# If we ever have this much buffered without it making a complete parseable +# event, we error out. 
The only time we really buffer is when reading the +# request/response line + headers together, so this is effectively the limit on +# the size of that. +# +# Some precedents for defaults: +# - node.js: 80 * 1024 +# - tomcat: 8 * 1024 +# - IIS: 16 * 1024 +# - Apache: <8 KiB per line> +DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 + +# RFC 7230's rules for connection lifecycles: +# - If either side says they want to close the connection, then the connection +# must close. +# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close +# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive +# (and even this is a mess -- e.g. if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event: Union[Request, Response]) -> bool: + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing( + request_method: bytes, event: Union[Request, Response] +) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. + # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. 
+ if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If + you're implementing a server, pass :data:`h11.SERVER`. + + max_incomplete_event_size (int): + The maximum number of bytes we're willing to buffer of an + incomplete event. In practice this mostly sets a limit on the + maximum size of the request/response line + headers. If this is + exceeded, then :meth:`next_event` will raise + :exc:`RemoteProtocolError`. 
+ + """ + + def __init__( + self, + our_role: Type[Sentinel], + max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, + ) -> None: + self._max_incomplete_event_size = max_incomplete_event_size + # State and role tracking + if our_role not in (CLIENT, SERVER): + raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) + self.our_role = our_role + self.their_role: Type[Sentinel] + if our_role is CLIENT: + self.their_role = SERVER + else: + self.their_role = CLIENT + self._cstate = ConnectionState() + + # Callables for converting data->events or vice-versa given the + # current state + self._writer = self._get_io_object(self.our_role, None, WRITERS) + self._reader = self._get_io_object(self.their_role, None, READERS) + + # Holds any unprocessed received data + self._receive_buffer = ReceiveBuffer() + # If this is true, then it indicates that the incoming connection was + # closed *after* the end of whatever's in self._receive_buffer: + self._receive_buffer_closed = False + + # Extra bits of state that don't fit into the state machine. + # + # These two are only used to interpret framing headers for figuring + # out how to read/write response bodies. their_http_version is also + # made available as a convenient public API. + self.their_http_version: Optional[bytes] = None + self._request_method: Optional[bytes] = None + # This is pure flow-control and doesn't at all affect the set of legal + # transitions, so no need to bother ConnectionState with it: + self.client_is_waiting_for_100_continue = False + + @property + def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]: + """A dictionary like:: + + {CLIENT: , SERVER: } + + See :ref:`state-machine` for details. + + """ + return dict(self._cstate.states) + + @property + def our_state(self) -> Type[Sentinel]: + """The current state of whichever role we are playing. See + :ref:`state-machine` for details. 
+ """ + return self._cstate.states[self.our_role] + + @property + def their_state(self) -> Type[Sentinel]: + """The current state of whichever role we are NOT playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.their_role] + + @property + def they_are_waiting_for_100_continue(self) -> bool: + return self.their_role is CLIENT and self.client_is_waiting_for_100_continue + + def start_next_cycle(self) -> None: + """Attempt to reset our connection state for a new request/response + cycle. + + If both client and server are in :data:`DONE` state, then resets them + both to :data:`IDLE` state in preparation for a new request/response + cycle on this same connection. Otherwise, raises a + :exc:`LocalProtocolError`. + + See :ref:`keepalive-and-pipelining`. + + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role: Type[Sentinel]) -> None: + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role: Type[Sentinel], event: Event) -> None: + # First, pass the event through the state machine to make sure it + # succeeds. 
+ old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + if type(event) is Request: + self._request_method = event.method + + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + event = cast(Union[Request, Response, InformationalResponse], event) + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen. In any case, if it does happen, we + # ignore it. 
+ if type(event) in (Request, Response) and not _keep_alive( + cast(Union[Request, Response], event) + ): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object( + self, + role: Type[Sentinel], + event: Optional[Event], + io_dict: Union[ReadersType, WritersType], + ) -> Optional[Callable[..., Any]]: + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing( + cast(bytes, self._request_method), cast(Union[Request, Response], event) + ) + return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) # type: ignore[return-value] + + # This must be called after any action that might have caused + # self._cstate.states to change. 
+ def _respond_to_state_changes( + self, + old_states: Dict[Type[Sentinel], Type[Sentinel]], + event: Optional[Event] = None, + ) -> None: + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self) -> Tuple[bytes, bool]: + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data: bytes) -> None: + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. 
+ + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. + + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event( + self, + ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). + if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() # type: ignore[attr-defined] + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event # type: ignore[no-any-return] + + def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + """Parse the next event out of our receive buffer, update our internal + state, and return it. 
+ + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. + + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method returns :class:`ConnectionClosed` once, then all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! + + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
+ + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, cast(Event, event)) + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + def send(self, event: Event) -> Optional[bytes]: + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event ` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
+ + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + event = self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list: List[bytes] = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self) -> None: + """Notify the state machine that we failed to send the data it gave + us. + + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. We take care of those + # times. 
(You can also set it yourself if you want, and if you do then + # we'll respect that and close the connection at the right time. But you + # don't have to worry about that unless you want to.) + # + # - The user has to set Content-Length if they want it. Otherwise, for + # responses that have bodies (e.g. not HEAD), then we will automatically + # select the right mechanism for streaming a body of unknown length, + # which depends on depending on the peer's HTTP version. + # + # This function's *only* responsibility is making sure headers are set up + # right -- everything downstream just looks at the headers. There are no + # side channels. + def _clean_up_response_headers_for_sending(self, response: Response) -> Response: + assert type(response) is Response + + headers = response.headers + need_close = False + + # HEAD requests need some special handling: they always act like they + # have Content-Length: 0, and that's how _body_framing treats + # them. But their headers are supposed to match what we would send if + # the request was a GET. (Technically there is one deviation allowed: + # we're allowed to leave out the framing headers -- see + # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as + # easy to get them right.) + method_for_choosing_headers = cast(bytes, self._request_method) + if method_for_choosing_headers == b"HEAD": + method_for_choosing_headers = b"GET" + framing_type, _ = _body_framing(method_for_choosing_headers, response) + if framing_type in ("chunked", "http/1.0"): + # This response has a body of unknown length. + # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked + # If our peer is HTTP/1.0, we use no framing headers, and close the + # connection afterwards. 
+ # + # Make sure to clear Content-Length (in principle user could have + # set both and then we ignored Content-Length b/c + # Transfer-Encoding overwrote it -- this would be naughty of them, + # but the HTTP spec says that if our peer does this then we have + # to fix it instead of erroring out, so we'll accord the user the + # same respect). + headers = set_comma_header(headers, b"content-length", []) + if self.their_http_version is None or self.their_http_version < b"1.1": + # Either we never got a valid request and are sending back an + # error (their_http_version is None), so we assume the worst; + # or else we did get a valid HTTP/1.0 request, so we know that + # they don't understand chunked encoding. + headers = set_comma_header(headers, b"transfer-encoding", []) + # This is actually redundant ATM, since currently we + # unconditionally disable keep-alive when talking to HTTP/1.0 + # peers. But let's be defensive just in case we add + # Connection: keep-alive support later: + if self._request_method != b"HEAD": + need_close = True + else: + headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) + + if not self._cstate.keep_alive or need_close: + # Make sure Connection: close is set + connection = set(get_comma_header(headers, b"connection")) + connection.discard(b"keep-alive") + connection.add(b"close") + headers = set_comma_header(headers, b"connection", sorted(connection)) + + return Response( + headers=headers, + status_code=response.status_code, + http_version=response.http_version, + reason=response.reason, + ) diff --git a/parrot/lib/python3.10/site-packages/h11/_events.py b/parrot/lib/python3.10/site-packages/h11/_events.py new file mode 100644 index 0000000000000000000000000000000000000000..075bf8a469d44d2388b08ec3d009fe55d44cb6eb --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/_events.py @@ -0,0 +1,369 @@ +# High level events that make up HTTP/1.1 conversations. 
Loosely inspired by +# the corresponding events in hyper-h2: +# +# http://python-hyper.org/h2/en/stable/api.html#events +# +# Don't subclass these. Stuff will break. + +import re +from abc import ABC +from dataclasses import dataclass, field +from typing import Any, cast, Dict, List, Tuple, Union + +from ._abnf import method, request_target +from ._headers import Headers, normalize_and_validate +from ._util import bytesify, LocalProtocolError, validate + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "Event", + "Request", + "InformationalResponse", + "Response", + "Data", + "EndOfMessage", + "ConnectionClosed", +] + +method_re = re.compile(method.encode("ascii")) +request_target_re = re.compile(request_target.encode("ascii")) + + +class Event(ABC): + """ + Base class for h11 events. + """ + + __slots__ = () + + +@dataclass(init=False, frozen=True) +class Request(Event): + """The beginning of an HTTP request. + + Fields: + + .. attribute:: method + + An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: target + + The target of an HTTP request, e.g. ``b"/index.html"``, or one of the + more exotic formats described in `RFC 7320, section 5.3 + `_. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. 
+ + """ + + __slots__ = ("method", "headers", "target", "http_version") + + method: bytes + headers: Headers + target: bytes + http_version: bytes + + def __init__( + self, + *, + method: Union[bytes, str], + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + target: Union[bytes, str], + http_version: Union[bytes, str] = b"1.1", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "method", bytesify(method)) + object.__setattr__(self, "target", bytesify(target)) + object.__setattr__(self, "http_version", bytesify(http_version)) + else: + object.__setattr__(self, "method", method) + object.__setattr__(self, "target", target) + object.__setattr__(self, "http_version", http_version) + + # "A server MUST respond with a 400 (Bad Request) status code to any + # HTTP/1.1 request message that lacks a Host header field and to any + # request message that contains more than one Host header field or a + # Host header field with an invalid field-value." + # -- https://tools.ietf.org/html/rfc7230#section-5.4 + host_count = 0 + for name, value in self.headers: + if name == b"host": + host_count += 1 + if self.http_version == b"1.1" and host_count == 0: + raise LocalProtocolError("Missing mandatory Host: header") + if host_count > 1: + raise LocalProtocolError("Found multiple Host: headers") + + validate(method_re, self.method, "Illegal method characters") + validate(request_target_re, self.target, "Illegal target characters") + + # This is an unhashable type. 
+ __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class _ResponseBase(Event): + __slots__ = ("headers", "http_version", "reason", "status_code") + + headers: Headers + http_version: bytes + reason: bytes + status_code: int + + def __init__( + self, + *, + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + status_code: int, + http_version: Union[bytes, str] = b"1.1", + reason: Union[bytes, str] = b"", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "reason", bytesify(reason)) + object.__setattr__(self, "http_version", bytesify(http_version)) + if not isinstance(status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. + object.__setattr__(self, "status_code", int(status_code)) + else: + object.__setattr__(self, "reason", reason) + object.__setattr__(self, "http_version", http_version) + object.__setattr__(self, "status_code", status_code) + + self.__post_init__() + + def __post_init__(self) -> None: + pass + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class InformationalResponse(_ResponseBase): + """An HTTP informational response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`InformationalResponse`, this is always in the range [100, + 200). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for + details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. 
See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (100 <= self.status_code < 200): + raise LocalProtocolError( + "InformationalResponse status_code should be in range " + "[100, 200), not {}".format(self.status_code) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Response(_ResponseBase): + """The beginning of an HTTP response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`Response`, this is always in the range [200, + 1000). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (200 <= self.status_code < 1000): + raise LocalProtocolError( + "Response status_code should be in range [200, 1000), not {}".format( + self.status_code + ) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Data(Event): + """Part of an HTTP message body. + + Fields: + + .. attribute:: data + + A :term:`bytes-like object` containing part of a message body. Or, if + using the ``combine=False`` argument to :meth:`Connection.send`, then + any object that your socket writing code knows what to do with, and for + which calling :func:`len` returns the number of bytes that will be + written -- see :ref:`sendfile` for details. + + .. 
attribute:: chunk_start + + A marker that indicates whether this data object is from the start of a + chunked transfer encoding chunk. This field is ignored when when a Data + event is provided to :meth:`Connection.send`: it is only valid on + events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + .. attribute:: chunk_end + + A marker that indicates whether this data object is the last for a + given chunked transfer encoding chunk. This field is ignored when when + a Data event is provided to :meth:`Connection.send`: it is only valid + on events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + """ + + __slots__ = ("data", "chunk_start", "chunk_end") + + data: bytes + chunk_start: bool + chunk_end: bool + + def __init__( + self, data: bytes, chunk_start: bool = False, chunk_end: bool = False + ) -> None: + object.__setattr__(self, "data", data) + object.__setattr__(self, "chunk_start", chunk_start) + object.__setattr__(self, "chunk_end", chunk_end) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that +# are forbidden to be sent in a trailer, since processing them as if they were +# present in the header section might bypass external security filters." +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part +# Unfortunately, the list of forbidden fields is long and vague :-/ +@dataclass(init=False, frozen=True) +class EndOfMessage(Event): + """The end of an HTTP message. + + Fields: + + .. attribute:: headers + + Default value: ``[]`` + + Any trailing headers attached to this message, represented as a list of + (name, value) pairs. See :ref:`the header normalization rules + ` for details. 
+ + Must be empty unless ``Transfer-Encoding: chunked`` is in use. + + """ + + __slots__ = ("headers",) + + headers: Headers + + def __init__( + self, + *, + headers: Union[ + Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None + ] = None, + _parsed: bool = False, + ) -> None: + super().__init__() + if headers is None: + headers = Headers([]) + elif not isinstance(headers, Headers): + headers = normalize_and_validate(headers, _parsed=_parsed) + + object.__setattr__(self, "headers", headers) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(frozen=True) +class ConnectionClosed(Event): + """This event indicates that the sender has closed their outgoing + connection. + + Note that this does not necessarily mean that they can't *receive* further + data, because TCP connections are composed to two one-way channels which + can be closed independently. See :ref:`closing` for details. + + No fields. + """ + + pass diff --git a/parrot/lib/python3.10/site-packages/h11/_headers.py b/parrot/lib/python3.10/site-packages/h11/_headers.py new file mode 100644 index 0000000000000000000000000000000000000000..b97d020b634a9f47f5ae6aa3b30e2bd13a6c48c4 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/_headers.py @@ -0,0 +1,278 @@ +import re +from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union + +from ._abnf import field_name, field_value +from ._util import bytesify, LocalProtocolError, validate + +if TYPE_CHECKING: + from ._events import Request + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + + +# Facts +# ----- +# +# Headers are: +# keys: case-insensitive ascii +# values: mixture of ascii and raw bytes +# +# "Historically, HTTP has allowed field content with text in the ISO-8859-1 +# charset [ISO-8859-1], supporting other charsets only through use of +# [RFC2047] encoding. 
In practice, most HTTP header field values use only a +# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD +# limit their field values to US-ASCII octets. A recipient SHOULD treat other +# octets in field content (obs-text) as opaque data." +# And it deprecates all non-ascii values +# +# Leading/trailing whitespace in header names is forbidden +# +# Values get leading/trailing whitespace stripped +# +# Content-Disposition actually needs to contain unicode semantically; to +# accomplish this it has a terrifically weird way of encoding the filename +# itself as ascii (and even this still has lots of cross-browser +# incompatibilities) +# +# Order is important: +# "a proxy MUST NOT change the order of these field values when forwarding a +# message" +# (and there are several headers where the order indicates a preference) +# +# Multiple occurences of the same header: +# "A sender MUST NOT generate multiple header fields with the same field name +# in a message unless either the entire field value for that header field is +# defined as a comma-separated list [or the header is Set-Cookie which gets a +# special exception]" - RFC 7230. (cookies are in RFC 6265) +# +# So every header aside from Set-Cookie can be merged by b", ".join if it +# occurs repeatedly. But, of course, they can't necessarily be split by +# .split(b","), because quoting. +# +# Given all this mess (case insensitive, duplicates allowed, order is +# important, ...), there doesn't appear to be any standard way to handle +# headers in Python -- they're almost like dicts, but... actually just +# aren't. For now we punt and just use a super simple representation: headers +# are a list of pairs +# +# [(name1, value1), (name2, value2), ...] +# +# where all entries are bytestrings, names are lowercase and have no +# leading/trailing whitespace, and values are bytestrings with no +# leading/trailing whitespace. Searching and updating are done via naive O(n) +# methods. 
+# +# Maybe a dict-of-lists would be better? + +_content_length_re = re.compile(rb"[0-9]+") +_field_name_re = re.compile(field_name.encode("ascii")) +_field_value_re = re.compile(field_value.encode("ascii")) + + +class Headers(Sequence[Tuple[bytes, bytes]]): + """ + A list-like interface that allows iterating over headers as byte-pairs + of (lowercased-name, value). + + Internally we actually store the representation as three-tuples, + including both the raw original casing, in order to preserve casing + over-the-wire, and the lowercased name, for case-insensitive comparisions. + + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert r.headers == [ + (b"host", b"example.org"), + (b"connection", b"keep-alive") + ] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive") + ] + """ + + __slots__ = "_full_items" + + def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None: + self._full_items = full_items + + def __bool__(self) -> bool: + return bool(self._full_items) + + def __eq__(self, other: object) -> bool: + return list(self) == list(other) # type: ignore + + def __len__(self) -> int: + return len(self._full_items) + + def __repr__(self) -> str: + return "" % repr(list(self)) + + def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override] + _, name, value = self._full_items[idx] + return (name, value) + + def raw_items(self) -> List[Tuple[bytes, bytes]]: + return [(raw_name, value) for raw_name, _, value in self._full_items] + + +HeaderTypes = Union[ + List[Tuple[bytes, bytes]], + List[Tuple[bytes, str]], + List[Tuple[str, bytes]], + List[Tuple[str, str]], +] + + +@overload +def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers: + ... + + +@overload +def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers: + ... 
+ + +@overload +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + ... + + +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + new_headers = [] + seen_content_length = None + saw_transfer_encoding = False + for name, value in headers: + # For headers coming out of the parser, we can safely skip some steps, + # because it always returns bytes and has already run these regexes + # over the data: + if not _parsed: + name = bytesify(name) + value = bytesify(value) + validate(_field_name_re, name, "Illegal header name {!r}", name) + validate(_field_value_re, value, "Illegal header value {!r}", value) + assert isinstance(name, bytes) + assert isinstance(value, bytes) + + raw_name = name + name = name.lower() + if name == b"content-length": + lengths = {length.strip() for length in value.split(b",")} + if len(lengths) != 1: + raise LocalProtocolError("conflicting Content-Length headers") + value = lengths.pop() + validate(_content_length_re, value, "bad Content-Length") + if seen_content_length is None: + seen_content_length = value + new_headers.append((raw_name, name, value)) + elif seen_content_length != value: + raise LocalProtocolError("conflicting Content-Length headers") + elif name == b"transfer-encoding": + # "A server that receives a request message with a transfer coding + # it does not understand SHOULD respond with 501 (Not + # Implemented)." 
+ # https://tools.ietf.org/html/rfc7230#section-3.3.1 + if saw_transfer_encoding: + raise LocalProtocolError( + "multiple Transfer-Encoding headers", error_status_hint=501 + ) + # "All transfer-coding names are case-insensitive" + # -- https://tools.ietf.org/html/rfc7230#section-4 + value = value.lower() + if value != b"chunked": + raise LocalProtocolError( + "Only Transfer-Encoding: chunked is supported", + error_status_hint=501, + ) + saw_transfer_encoding = True + new_headers.append((raw_name, name, value)) + else: + new_headers.append((raw_name, name, value)) + return Headers(new_headers) + + +def get_comma_header(headers: Headers, name: bytes) -> List[bytes]: + # Should only be used for headers whose value is a list of + # comma-separated, case-insensitive values. + # + # The header name `name` is expected to be lower-case bytes. + # + # Connection: meets these criteria (including cast insensitivity). + # + # Content-Length: technically is just a single value (1*DIGIT), but the + # standard makes reference to implementations that do multiple values, and + # using this doesn't hurt. Ditto, case insensitivity doesn't things either + # way. + # + # Transfer-Encoding: is more complex (allows for quoted strings), so + # splitting on , is actually wrong. For example, this is legal: + # + # Transfer-Encoding: foo; options="1,2", chunked + # + # and should be parsed as + # + # foo; options="1,2" + # chunked + # + # but this naive function will parse it as + # + # foo; options="1 + # 2" + # chunked + # + # However, this is okay because the only thing we are going to do with + # any Transfer-Encoding is reject ones that aren't just "chunked", so + # both of these will be treated the same anyway. + # + # Expect: the only legal value is the literal string + # "100-continue". Splitting on commas is harmless. Case insensitive. 
+ # + out: List[bytes] = [] + for _, found_name, found_raw_value in headers._full_items: + if found_name == name: + found_raw_value = found_raw_value.lower() + for found_split_value in found_raw_value.split(b","): + found_split_value = found_split_value.strip() + if found_split_value: + out.append(found_split_value) + return out + + +def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers: + # The header name `name` is expected to be lower-case bytes. + # + # Note that when we store the header we use title casing for the header + # names, in order to match the conventional HTTP header style. + # + # Simply calling `.title()` is a blunt approach, but it's correct + # here given the cases where we're using `set_comma_header`... + # + # Connection, Content-Length, Transfer-Encoding. + new_headers: List[Tuple[bytes, bytes]] = [] + for found_raw_name, found_name, found_raw_value in headers._full_items: + if found_name != name: + new_headers.append((found_raw_name, found_raw_value)) + for new_value in new_values: + new_headers.append((name.title(), new_value)) + return normalize_and_validate(new_headers) + + +def has_expect_100_continue(request: "Request") -> bool: + # https://tools.ietf.org/html/rfc7231#section-5.1.1 + # "A server that receives a 100-continue expectation in an HTTP/1.0 request + # MUST ignore that expectation." 
+ if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/parrot/lib/python3.10/site-packages/h11/_readers.py b/parrot/lib/python3.10/site-packages/h11/_readers.py new file mode 100644 index 0000000000000000000000000000000000000000..08a9574da4a89d82dfb71b3087b14c8644102dd6 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/_readers.py @@ -0,0 +1,247 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. 
It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re +from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import Data, EndOfMessage, InformationalResponse, Request, Response +from ._receivebuffer import ReceiveBuffer +from ._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, +) +from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) +obs_fold_re = re.compile(rb"[ \t]+") + + +def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: + it = iter(lines) + last: Optional[bytes] = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + # Cast to a mutable type, avoiding copy on append to ensure O(n) time + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines( + lines: Iterable[bytes], +) -> Iterable[Tuple[bytes, bytes]]: + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], 
"illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server( + buf: ReceiveBuffer, +) -> Union[InformationalResponse, Response, None]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + http_version = ( + b"1.1" if matches["http_version"] is None else matches["http_version"] + ) + reason = b"" if matches["reason"] is None else matches["reason"] + status_code = int(matches["status_code"]) + class_: Union[Type[InformationalResponse], Type[Response]] = ( + InformationalResponse if status_code < 200 else Response + ) + return class_( + headers=list(_decode_header_lines(lines[1:])), + _parsed=True, + status_code=status_code, + reason=reason, + http_version=http_version, + ) + + +class ContentLengthReader: + def __init__(self, length: int) -> None: + self._length = length + self._remaining = length + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self) -> None: + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n; if + # 
this is >0 then we discard that many bytes before resuming regular + # de-chunkification. + self._bytes_to_discard = 0 + self._reading_trailer = False + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard > 0: + data = buf.maybe_extract_at_most(self._bytes_to_discard) + if data is None: + return None + self._bytes_to_discard -= len(data) + if self._bytes_to_discard > 0: + return None + # else, fall through and read some more + assert self._bytes_to_discard == 0 + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? 
+ self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = 2 + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self) -> EndOfMessage: + return EndOfMessage() + + +def expect_nothing(buf: ReceiveBuffer) -> None: + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +ReadersType = Dict[ + Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], + Union[Callable[..., Any], Dict[str, Callable[..., Any]]], +] + +READERS: ReadersType = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/parrot/lib/python3.10/site-packages/h11/_receivebuffer.py b/parrot/lib/python3.10/site-packages/h11/_receivebuffer.py new file mode 100644 index 
0000000000000000000000000000000000000000..e5c4e08a56f5081e87103f38b4add6ce1b730204 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/_receivebuffer.py @@ -0,0 +1,153 @@ +import re +import sys +from typing import List, Optional, Union + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. +# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) 
+blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) + + +class ReceiveBuffer: + def __init__(self) -> None: + self._data = bytearray() + self._next_line_search = 0 + self._multiple_lines_search = 0 + + def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": + self._data += byteslike + return self + + def __bool__(self) -> bool: + return bool(len(self)) + + def __len__(self) -> int: + return len(self._data) + + # for @property unprocessed_data + def __bytes__(self) -> bytes: + return bytes(self._data) + + def _extract(self, count: int) -> bytearray: + # extracting an initial slice of the data buffer and return it + out = self._data[:count] + del self._data[:count] + + self._next_line_search = 0 + self._multiple_lines_search = 0 + + return out + + def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: + """ + Extract a fixed number of bytes from the buffer. + """ + out = self._data[:count] + if not out: + return None + + return self._extract(count) + + def maybe_extract_next_line(self) -> Optional[bytearray]: + """ + Extract the first line, if it is completed in the buffer. + """ + # Only search in buffer space that we've not already looked at. + search_start_index = max(0, self._next_line_search - 1) + partial_idx = self._data.find(b"\r\n", search_start_index) + + if partial_idx == -1: + self._next_line_search = len(self._data) + return None + + # + 2 is to compensate len(b"\r\n") + idx = partial_idx + 2 + + return self._extract(idx) + + def maybe_extract_lines(self) -> Optional[List[bytearray]]: + """ + Extract everything up to the first blank line, and return a list of lines. + """ + # Handle the case where we have an immediate empty line. + if self._data[:1] == b"\n": + self._extract(1) + return [] + + if self._data[:2] == b"\r\n": + self._extract(2) + return [] + + # Only search in buffer space that we've not already looked at. 
+ match = blank_line_regex.search(self._data, self._multiple_lines_search) + if match is None: + self._multiple_lines_search = max(0, len(self._data) - 2) + return None + + # Truncate the buffer and return it. + idx = match.span(0)[-1] + out = self._extract(idx) + lines = out.split(b"\n") + + for line in lines: + if line.endswith(b"\r"): + del line[-1] + + assert lines[-2] == lines[-1] == b"" + + del lines[-2:] + + return lines + + # In theory we should wait until `\r\n` before starting to validate + # incoming data. However it's interesting to detect (very) invalid data + # early given they might not even contain `\r\n` at all (hence only + # timeout will get rid of them). + # This is not a 100% effective detection but more of a cheap sanity check + # allowing for early abort in some useful cases. + # This is especially interesting when peer is messing up with HTTPS and + # sent us a TLS stream where we were expecting plain HTTP given all + # versions of TLS so far start handshake with a 0x16 message type code. + def is_next_line_obviously_invalid_request_line(self) -> bool: + try: + # HTTP header line must not contain non-printable characters + # and should not start with a space + return self._data[0] < 0x21 + except IndexError: + return False diff --git a/parrot/lib/python3.10/site-packages/h11/_util.py b/parrot/lib/python3.10/site-packages/h11/_util.py new file mode 100644 index 0000000000000000000000000000000000000000..6718445290770e028ea2f1f662026c9a0b0991db --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/_util.py @@ -0,0 +1,135 @@ +from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union + +__all__ = [ + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "validate", + "bytesify", +] + + +class ProtocolError(Exception): + """Exception indicating a violation of the HTTP/1.1 protocol. 
+ + This as an abstract base class, with two concrete base classes: + :exc:`LocalProtocolError`, which indicates that you tried to do something + that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which + indicates that the remote peer tried to do something that HTTP/1.1 says is + illegal. See :ref:`error-handling` for details. + + In addition to the normal :exc:`Exception` features, it has one attribute: + + .. attribute:: error_status_hint + + This gives a suggestion as to what status code a server might use if + this error occurred as part of a request. + + For a :exc:`RemoteProtocolError`, this is useful as a suggestion for + how you might want to respond to a misbehaving peer, if you're + implementing a server. + + For a :exc:`LocalProtocolError`, this can be taken as a suggestion for + how your peer might have responded to *you* if h11 had allowed you to + continue. + + The default is 400 Bad Request, a generic catch-all for protocol + violations. + + """ + + def __init__(self, msg: str, error_status_hint: int = 400) -> None: + if type(self) is ProtocolError: + raise TypeError("tried to directly instantiate ProtocolError") + Exception.__init__(self, msg) + self.error_status_hint = error_status_hint + + +# Strategy: there are a number of public APIs where a LocalProtocolError can +# be raised (send(), all the different event constructors, ...), and only one +# public API where RemoteProtocolError can be raised +# (receive_data()). Therefore we always raise LocalProtocolError internally, +# and then receive_data will translate this into a RemoteProtocolError. +# +# Internally: +# LocalProtocolError is the generic "ProtocolError". +# Externally: +# LocalProtocolError is for local errors and RemoteProtocolError is for +# remote errors. +class LocalProtocolError(ProtocolError): + def _reraise_as_remote_protocol_error(self) -> NoReturn: + # After catching a LocalProtocolError, use this method to re-raise it + # as a RemoteProtocolError. 
This method must be called from inside an + # except: block. + # + # An easy way to get an equivalent RemoteProtocolError is just to + # modify 'self' in place. + self.__class__ = RemoteProtocolError # type: ignore + # But the re-raising is somewhat non-trivial -- you might think that + # now that we've modified the in-flight exception object, that just + # doing 'raise' to re-raise it would be enough. But it turns out that + # this doesn't work, because Python tracks the exception type + # (exc_info[0]) separately from the exception object (exc_info[1]), + # and we only modified the latter. So we really do need to re-raise + # the new type explicitly. + # On py3, the traceback is part of the exception object, so our + # in-place modification preserved it and we can just re-raise: + raise self + + +class RemoteProtocolError(ProtocolError): + pass + + +def validate( + regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any +) -> Dict[str, bytes]: + match = regex.fullmatch(data) + if not match: + if format_args: + msg = msg.format(*format_args) + raise LocalProtocolError(msg) + return match.groupdict() + + +# Sentinel values +# +# - Inherit identity-based comparison and hashing from object +# - Have a nice repr +# - Have a *bonus property*: type(sentinel) is sentinel +# +# The bonus property is useful if you want to take the return value from +# next_event() and do some sort of dispatch based on type(event). + +_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel") + + +class Sentinel(type): + def __new__( + cls: Type[_T_Sentinel], + name: str, + bases: Tuple[type, ...], + namespace: Dict[str, Any], + **kwds: Any + ) -> _T_Sentinel: + assert bases == (Sentinel,) + v = super().__new__(cls, name, bases, namespace, **kwds) + v.__class__ = v # type: ignore + return v + + def __repr__(self) -> str: + return self.__name__ + + +# Used for methods, request targets, HTTP versions, header names, and header +# values. 
Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always +# returns bytes. +def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes: + # Fast-path: + if type(s) is bytes: + return s + if isinstance(s, str): + s = s.encode("ascii") + if isinstance(s, int): + raise TypeError("expected bytes-like object, not int") + return bytes(s) diff --git a/parrot/lib/python3.10/site-packages/h11/_version.py b/parrot/lib/python3.10/site-packages/h11/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..4c8911305680c1083b2da9b87ece12bc36f3a9e1 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/_version.py @@ -0,0 +1,16 @@ +# This file must be kept very simple, because it is consumed from several +# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc. + +# We use a simple scheme: +# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev +# where the +dev versions are never released into the wild, they're just what +# we stick into the VCS in between releases. +# +# This is compatible with PEP 440: +# http://legacy.python.org/dev/peps/pep-0440/ +# via the use of the "local suffix" "+dev", which is disallowed on index +# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we +# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before* +# 1.0.0.) + +__version__ = "0.14.0" diff --git a/parrot/lib/python3.10/site-packages/h11/_writers.py b/parrot/lib/python3.10/site-packages/h11/_writers.py new file mode 100644 index 0000000000000000000000000000000000000000..939cdb912a9debaea07fbf3a9ac04549c44d077c --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/_writers.py @@ -0,0 +1,145 @@ +# Code to read HTTP data +# +# Strategy: each writer takes an event + a write-some-bytes function, which is +# calls. +# +# WRITERS is a dict describing how to pick a reader. 
It maps states to either: +# - a writer +# - or, for body writers, a dict of framin-dependent writer factories + +from typing import Any, Callable, Dict, List, Tuple, Type, Union + +from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response +from ._headers import Headers +from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER +from ._util import LocalProtocolError, Sentinel + +__all__ = ["WRITERS"] + +Writer = Callable[[bytes], Any] + + +def write_headers(headers: Headers, write: Writer) -> None: + # "Since the Host field-value is critical information for handling a + # request, a user agent SHOULD generate Host as the first header field + # following the request-line." - RFC 7230 + raw_items = headers._full_items + for raw_name, name, value in raw_items: + if name == b"host": + write(b"%s: %s\r\n" % (raw_name, value)) + for raw_name, name, value in raw_items: + if name != b"host": + write(b"%s: %s\r\n" % (raw_name, value)) + write(b"\r\n") + + +def write_request(request: Request, write: Writer) -> None: + if request.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target)) + write_headers(request.headers, write) + + +# Shared between InformationalResponse and Response +def write_any_response( + response: Union[InformationalResponse, Response], write: Writer +) -> None: + if response.http_version != b"1.1": + raise LocalProtocolError("I only send HTTP/1.1") + status_bytes = str(response.status_code).encode("ascii") + # We don't bother sending ascii status messages like "OK"; they're + # optional and ignored by the protocol. (But the space after the numeric + # status code is mandatory.) + # + # XX FIXME: could at least make an effort to pull out the status message + # from stdlib's http.HTTPStatus table. Or maybe just steal their enums + # (either by import or copy/paste). 
We already accept them as status codes + # since they're of type IntEnum < int. + write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason)) + write_headers(response.headers, write) + + +class BodyWriter: + def __call__(self, event: Event, write: Writer) -> None: + if type(event) is Data: + self.send_data(event.data, write) + elif type(event) is EndOfMessage: + self.send_eom(event.headers, write) + else: # pragma: no cover + assert False + + def send_data(self, data: bytes, write: Writer) -> None: + pass + + def send_eom(self, headers: Headers, write: Writer) -> None: + pass + + +# +# These are all careful not to do anything to 'data' except call len(data) and +# write(data). This allows us to transparently pass-through funny objects, +# like placeholder objects referring to files on disk that will be sent via +# sendfile(2). +# +class ContentLengthWriter(BodyWriter): + def __init__(self, length: int) -> None: + self._length = length + + def send_data(self, data: bytes, write: Writer) -> None: + self._length -= len(data) + if self._length < 0: + raise LocalProtocolError("Too much data for declared Content-Length") + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if self._length != 0: + raise LocalProtocolError("Too little data for declared Content-Length") + if headers: + raise LocalProtocolError("Content-Length and trailers don't mix") + + +class ChunkedWriter(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + # if we encoded 0-length data in the naive way, it would look like an + # end-of-message. 
+ if not data: + return + write(b"%x\r\n" % len(data)) + write(data) + write(b"\r\n") + + def send_eom(self, headers: Headers, write: Writer) -> None: + write(b"0\r\n") + write_headers(headers, write) + + +class Http10Writer(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if headers: + raise LocalProtocolError("can't send trailers to HTTP/1.0 client") + # no need to close the socket ourselves, that will be taken care of by + # Connection: close machinery + + +WritersType = Dict[ + Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]], + Union[ + Dict[str, Type[BodyWriter]], + Callable[[Union[InformationalResponse, Response], Writer], None], + Callable[[Request, Writer], None], + ], +] + +WRITERS: WritersType = { + (CLIENT, IDLE): write_request, + (SERVER, IDLE): write_any_response, + (SERVER, SEND_RESPONSE): write_any_response, + SEND_BODY: { + "chunked": ChunkedWriter, + "content-length": ContentLengthWriter, + "http/1.0": Http10Writer, + }, +} diff --git a/parrot/lib/python3.10/site-packages/h11/py.typed b/parrot/lib/python3.10/site-packages/h11/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..f5642f79f21d872f010979dcf6f0c4a415acc19d --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/py.typed @@ -0,0 +1 @@ +Marker diff --git a/parrot/lib/python3.10/site-packages/h11/tests/helpers.py b/parrot/lib/python3.10/site-packages/h11/tests/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..571be44461b0847c9edb8654c9d528abed0b7800 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/h11/tests/helpers.py @@ -0,0 +1,101 @@ +from typing import cast, List, Type, Union, ValuesView + +from .._connection import Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import CLIENT, 
CLOSED, DONE, MUST_CLOSE, SERVER +from .._util import Sentinel + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + + +def get_all_events(conn: Connection) -> List[Event]: + got_events = [] + while True: + event = conn.next_event() + if event in (NEED_DATA, PAUSED): + break + event = cast(Event, event) + got_events.append(event) + if type(event) is ConnectionClosed: + break + return got_events + + +def receive_and_get(conn: Connection, data: bytes) -> List[Event]: + conn.receive_data(data) + return get_all_events(conn) + + +# Merges adjacent Data events, converts payloads to bytestrings, and removes +# chunk boundaries. +def normalize_data_events(in_events: List[Event]) -> List[Event]: + out_events: List[Event] = [] + for event in in_events: + if type(event) is Data: + event = Data(data=bytes(event.data), chunk_start=False, chunk_end=False) + if out_events and type(out_events[-1]) is type(event) is Data: + out_events[-1] = Data( + data=out_events[-1].data + event.data, + chunk_start=out_events[-1].chunk_start, + chunk_end=out_events[-1].chunk_end, + ) + else: + out_events.append(event) + return out_events + + +# Given that we want to write tests that push some events through a Connection +# and check that its state updates appropriately... we might as make a habit +# of pushing them through two Connections with a fake network link in +# between. +class ConnectionPair: + def __init__(self) -> None: + self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)} + self.other = {CLIENT: SERVER, SERVER: CLIENT} + + @property + def conns(self) -> ValuesView[Connection]: + return self.conn.values() + + # expect="match" if expect=send_events; expect=[...] 
to say what expected + def send( + self, + role: Type[Sentinel], + send_events: Union[List[Event], Event], + expect: Union[List[Event], Event, Literal["match"]] = "match", + ) -> bytes: + if not isinstance(send_events, list): + send_events = [send_events] + data = b"" + closed = False + for send_event in send_events: + new_data = self.conn[role].send(send_event) + if new_data is None: + closed = True + else: + data += new_data + # send uses b"" to mean b"", and None to mean closed + # receive uses b"" to mean closed, and None to mean "try again" + # so we have to translate between the two conventions + if data: + self.conn[self.other[role]].receive_data(data) + if closed: + self.conn[self.other[role]].receive_data(b"") + got_events = get_all_events(self.conn[self.other[role]]) + if expect == "match": + expect = send_events + if not isinstance(expect, list): + expect = [expect] + assert got_events == expect + return data diff --git a/parrot/lib/python3.10/site-packages/nvidia_cublas_cu12-12.1.3.1.dist-info/INSTALLER b/parrot/lib/python3.10/site-packages/nvidia_cublas_cu12-12.1.3.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/nvidia_cublas_cu12-12.1.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/parrot/lib/python3.10/site-packages/nvidia_cublas_cu12-12.1.3.1.dist-info/License.txt b/parrot/lib/python3.10/site-packages/nvidia_cublas_cu12-12.1.3.1.dist-info/License.txt new file mode 100644 index 0000000000000000000000000000000000000000..b491c70e0aef319022ded661e111ddbd45b8a17f --- /dev/null +++ b/parrot/lib/python3.10/site-packages/nvidia_cublas_cu12-12.1.3.1.dist-info/License.txt @@ -0,0 +1,1568 @@ +End User License Agreement +-------------------------- + + +Preface +------- + +The Software License Agreement in Chapter 1 and the Supplement +in Chapter 2 contain license terms and conditions that govern +the use of NVIDIA 
software. By accepting this agreement, you +agree to comply with all the terms and conditions applicable +to the product(s) included herein. + + +NVIDIA Driver + + +Description + +This package contains the operating system driver and +fundamental system software components for NVIDIA GPUs. + + +NVIDIA CUDA Toolkit + + +Description + +The NVIDIA CUDA Toolkit provides command-line and graphical +tools for building, debugging and optimizing the performance +of applications accelerated by NVIDIA GPUs, runtime and math +libraries, and documentation including programming guides, +user manuals, and API references. + + +Default Install Location of CUDA Toolkit + +Windows platform: + +%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.# + +Linux platform: + +/usr/local/cuda-#.# + +Mac platform: + +/Developer/NVIDIA/CUDA-#.# + + +NVIDIA CUDA Samples + + +Description + +This package includes over 100+ CUDA examples that demonstrate +various CUDA programming principles, and efficient CUDA +implementation of algorithms in specific application domains. + + +Default Install Location of CUDA Samples + +Windows platform: + +%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.# + +Linux platform: + +/usr/local/cuda-#.#/samples + +and + +$HOME/NVIDIA_CUDA-#.#_Samples + +Mac platform: + +/Developer/NVIDIA/CUDA-#.#/samples + + +NVIDIA Nsight Visual Studio Edition (Windows only) + + +Description + +NVIDIA Nsight Development Platform, Visual Studio Edition is a +development environment integrated into Microsoft Visual +Studio that provides tools for debugging, profiling, analyzing +and optimizing your GPU computing and graphics applications. + + +Default Install Location of Nsight Visual Studio Edition + +Windows platform: + +%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.# + + +1. 
License Agreement for NVIDIA Software Development Kits +--------------------------------------------------------- + + +Release Date: July 26, 2018 +--------------------------- + + +Important NoticeRead before downloading, installing, +copying or using the licensed software: +------------------------------------------------------- + +This license agreement, including exhibits attached +("Agreement”) is a legal agreement between you and NVIDIA +Corporation ("NVIDIA") and governs your use of a NVIDIA +software development kit (“SDK”). + +Each SDK has its own set of software and materials, but here +is a description of the types of items that may be included in +a SDK: source code, header files, APIs, data sets and assets +(examples include images, textures, models, scenes, videos, +native API input/output files), binary software, sample code, +libraries, utility programs, programming code and +documentation. + +This Agreement can be accepted only by an adult of legal age +of majority in the country in which the SDK is used. + +If you are entering into this Agreement on behalf of a company +or other legal entity, you represent that you have the legal +authority to bind the entity to this Agreement, in which case +“you” will mean the entity you represent. + +If you don’t have the required age or authority to accept +this Agreement, or if you don’t accept all the terms and +conditions of this Agreement, do not download, install or use +the SDK. + +You agree to use the SDK only for purposes that are permitted +by (a) this Agreement, and (b) any applicable law, regulation +or generally accepted practices or guidelines in the relevant +jurisdictions. + + +1.1. License + + +1.1.1. License Grant + +Subject to the terms of this Agreement, NVIDIA hereby grants +you a non-exclusive, non-transferable license, without the +right to sublicense (except as expressly provided in this +Agreement) to: + + 1. Install and use the SDK, + + 2. 
Modify and create derivative works of sample source code + delivered in the SDK, and + + 3. Distribute those portions of the SDK that are identified + in this Agreement as distributable, as incorporated in + object code format into a software application that meets + the distribution requirements indicated in this Agreement. + + +1.1.2. Distribution Requirements + +These are the distribution requirements for you to exercise +the distribution grant: + + 1. Your application must have material additional + functionality, beyond the included portions of the SDK. + + 2. The distributable portions of the SDK shall only be + accessed by your application. + + 3. The following notice shall be included in modifications + and derivative works of sample source code distributed: + “This software contains source code provided by NVIDIA + Corporation.” + + 4. Unless a developer tool is identified in this Agreement + as distributable, it is delivered for your internal use + only. + + 5. The terms under which you distribute your application + must be consistent with the terms of this Agreement, + including (without limitation) terms relating to the + license grant and license restrictions and protection of + NVIDIA’s intellectual property rights. Additionally, you + agree that you will protect the privacy, security and + legal rights of your application users. + + 6. You agree to notify NVIDIA in writing of any known or + suspected distribution or use of the SDK not in compliance + with the requirements of this Agreement, and to enforce + the terms of your agreements with respect to distributed + SDK. + + +1.1.3. Authorized Users + +You may allow employees and contractors of your entity or of +your subsidiary(ies) to access and use the SDK from your +secure network to perform work on your behalf. + +If you are an academic institution you may allow users +enrolled or employed by the academic institution to access and +use the SDK from your secure network. 
+ +You are responsible for the compliance with the terms of this +Agreement by your authorized users. If you become aware that +your authorized users didn’t follow the terms of this +Agreement, you agree to take reasonable steps to resolve the +non-compliance and prevent new occurrences. + + +1.1.4. Pre-Release SDK + +The SDK versions identified as alpha, beta, preview or +otherwise as pre-release, may not be fully functional, may +contain errors or design flaws, and may have reduced or +different security, privacy, accessibility, availability, and +reliability standards relative to commercial versions of +NVIDIA software and materials. Use of a pre-release SDK may +result in unexpected results, loss of data, project delays or +other unpredictable damage or loss. + +You may use a pre-release SDK at your own risk, understanding +that pre-release SDKs are not intended for use in production +or business-critical systems. + +NVIDIA may choose not to make available a commercial version +of any pre-release SDK. NVIDIA may also choose to abandon +development and terminate the availability of a pre-release +SDK at any time without liability. + + +1.1.5. Updates + +NVIDIA may, at its option, make available patches, workarounds +or other updates to this SDK. Unless the updates are provided +with their separate governing terms, they are deemed part of +the SDK licensed to you as provided in this Agreement. You +agree that the form and content of the SDK that NVIDIA +provides may change without prior notice to you. While NVIDIA +generally maintains compatibility between versions, NVIDIA may +in some cases make changes that introduce incompatibilities in +future versions of the SDK. + + +1.1.6. Third Party Licenses + +The SDK may come bundled with, or otherwise include or be +distributed with, third party software licensed by a NVIDIA +supplier and/or open source software provided under an open +source license. 
Use of third party software is subject to the +third-party license terms, or in the absence of third party +terms, the terms of this Agreement. Copyright to third party +software is held by the copyright holders indicated in the +third-party software or license. + + +1.1.7. Reservation of Rights + +NVIDIA reserves all rights, title, and interest in and to the +SDK, not expressly granted to you under this Agreement. + + +1.2. Limitations + +The following license limitations apply to your use of the +SDK: + + 1. You may not reverse engineer, decompile or disassemble, + or remove copyright or other proprietary notices from any + portion of the SDK or copies of the SDK. + + 2. Except as expressly provided in this Agreement, you may + not copy, sell, rent, sublicense, transfer, distribute, + modify, or create derivative works of any portion of the + SDK. For clarity, you may not distribute or sublicense the + SDK as a stand-alone product. + + 3. Unless you have an agreement with NVIDIA for this + purpose, you may not indicate that an application created + with the SDK is sponsored or endorsed by NVIDIA. + + 4. You may not bypass, disable, or circumvent any + encryption, security, digital rights management or + authentication mechanism in the SDK. + + 5. You may not use the SDK in any manner that would cause it + to become subject to an open source software license. As + examples, licenses that require as a condition of use, + modification, and/or distribution that the SDK be: + + a. Disclosed or distributed in source code form; + + b. Licensed for the purpose of making derivative works; + or + + c. Redistributable at no charge. + + 6. Unless you have an agreement with NVIDIA for this + purpose, you may not use the SDK with any system or + application where the use or failure of the system or + application can reasonably be expected to threaten or + result in personal injury, death, or catastrophic loss. 
+ Examples include use in avionics, navigation, military, + medical, life support or other life critical applications. + NVIDIA does not design, test or manufacture the SDK for + these critical uses and NVIDIA shall not be liable to you + or any third party, in whole or in part, for any claims or + damages arising from such uses. + + 7. You agree to defend, indemnify and hold harmless NVIDIA + and its affiliates, and their respective employees, + contractors, agents, officers and directors, from and + against any and all claims, damages, obligations, losses, + liabilities, costs or debt, fines, restitutions and + expenses (including but not limited to attorney’s fees + and costs incident to establishing the right of + indemnification) arising out of or related to your use of + the SDK outside of the scope of this Agreement, or not in + compliance with its terms. + + +1.3. Ownership + + 1. NVIDIA or its licensors hold all rights, title and + interest in and to the SDK and its modifications and + derivative works, including their respective intellectual + property rights, subject to your rights described in this + section. This SDK may include software and materials from + NVIDIA’s licensors, and these licensors are intended + third party beneficiaries that may enforce this Agreement + with respect to their intellectual property rights. + + 2. You hold all rights, title and interest in and to your + applications and your derivative works of the sample + source code delivered in the SDK, including their + respective intellectual property rights, subject to + NVIDIA’s rights described in this section. + + 3. You may, but don’t have to, provide to NVIDIA + suggestions, feature requests or other feedback regarding + the SDK, including possible enhancements or modifications + to the SDK. 
For any feedback that you voluntarily provide, + you hereby grant NVIDIA and its affiliates a perpetual, + non-exclusive, worldwide, irrevocable license to use, + reproduce, modify, license, sublicense (through multiple + tiers of sublicensees), and distribute (through multiple + tiers of distributors) it without the payment of any + royalties or fees to you. NVIDIA will use feedback at its + choice. NVIDIA is constantly looking for ways to improve + its products, so you may send feedback to NVIDIA through + the developer portal at https://developer.nvidia.com. + + +1.4. No Warranties + +THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL +FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND +ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND +OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING, +BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE +ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO +WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF +DEALING OR COURSE OF TRADE. + + +1.5. Limitation of Liability + +TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS +AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL, +PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS +OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF +PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION +WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK, +WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH +OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE), +PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF +LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES +TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS +AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE +NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS +LIMIT. 
+ +These exclusions and limitations of liability shall apply +regardless if NVIDIA or its affiliates have been advised of +the possibility of such damages, and regardless of whether a +remedy fails its essential purpose. These exclusions and +limitations of liability form an essential basis of the +bargain between the parties, and, absent any of these +exclusions or limitations of liability, the provisions of this +Agreement, including, without limitation, the economic terms, +would be substantially different. + + +1.6. Termination + + 1. This Agreement will continue to apply until terminated by + either you or NVIDIA as described below. + + 2. If you want to terminate this Agreement, you may do so by + stopping to use the SDK. + + 3. NVIDIA may, at any time, terminate this Agreement if: + + a. (i) you fail to comply with any term of this + Agreement and the non-compliance is not fixed within + thirty (30) days following notice from NVIDIA (or + immediately if you violate NVIDIA’s intellectual + property rights); + + b. (ii) you commence or participate in any legal + proceeding against NVIDIA with respect to the SDK; or + + c. (iii) NVIDIA decides to no longer provide the SDK in + a country or, in NVIDIA’s sole discretion, the + continued use of it is no longer commercially viable. + + 4. Upon any termination of this Agreement, you agree to + promptly discontinue use of the SDK and destroy all copies + in your possession or control. Your prior distributions in + accordance with this Agreement are not affected by the + termination of this Agreement. Upon written request, you + will certify in writing that you have complied with your + commitments under this section. Upon any termination of + this Agreement all provisions survive except for the + license grant provisions. + + +1.7. General + +If you wish to assign this Agreement or your rights and +obligations, including by merger, consolidation, dissolution +or operation of law, contact NVIDIA to ask for permission. 
Any +attempted assignment not approved by NVIDIA in writing shall +be void and of no effect. NVIDIA may assign, delegate or +transfer this Agreement and its rights and obligations, and if +to a non-affiliate you will be notified. + +You agree to cooperate with NVIDIA and provide reasonably +requested information to verify your compliance with this +Agreement. + +This Agreement will be governed in all respects by the laws of +the United States and of the State of Delaware as those laws +are applied to contracts entered into and performed entirely +within Delaware by Delaware residents, without regard to the +conflicts of laws principles. The United Nations Convention on +Contracts for the International Sale of Goods is specifically +disclaimed. You agree to all terms of this Agreement in the +English language. + +The state or federal courts residing in Santa Clara County, +California shall have exclusive jurisdiction over any dispute +or claim arising out of this Agreement. Notwithstanding this, +you agree that NVIDIA shall still be allowed to apply for +injunctive remedies or an equivalent type of urgent legal +relief in any jurisdiction. + +If any court of competent jurisdiction determines that any +provision of this Agreement is illegal, invalid or +unenforceable, such provision will be construed as limited to +the extent necessary to be consistent with and fully +enforceable under the law and the remaining provisions will +remain in full force and effect. Unless otherwise specified, +remedies are cumulative. + +Each party acknowledges and agrees that the other is an +independent contractor in the performance of this Agreement. + +The SDK has been developed entirely at private expense and is +“commercial items” consisting of “commercial computer +software” and “commercial computer software +documentation” provided with RESTRICTED RIGHTS. Use, +duplication or disclosure by the U.S. Government or a U.S. 
+Government subcontractor is subject to the restrictions in +this Agreement pursuant to DFARS 227.7202-3(a) or as set forth +in subparagraphs (c)(1) and (2) of the Commercial Computer +Software - Restricted Rights clause at FAR 52.227-19, as +applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas +Expressway, Santa Clara, CA 95051. + +The SDK is subject to United States export laws and +regulations. You agree that you will not ship, transfer or +export the SDK into any country, or use the SDK in any manner, +prohibited by the United States Bureau of Industry and +Security or economic sanctions regulations administered by the +U.S. Department of Treasury’s Office of Foreign Assets +Control (OFAC), or any applicable export laws, restrictions or +regulations. These laws include restrictions on destinations, +end users and end use. By accepting this Agreement, you +confirm that you are not a resident or citizen of any country +currently embargoed by the U.S. and that you are not otherwise +prohibited from receiving the SDK. + +Any notice delivered by NVIDIA to you under this Agreement +will be delivered via mail, email or fax. You agree that any +notices that NVIDIA sends you electronically will satisfy any +legal communication requirements. Please direct your legal +notices or other correspondence to NVIDIA Corporation, 2788 +San Tomas Expressway, Santa Clara, California 95051, United +States of America, Attention: Legal Department. + +This Agreement and any exhibits incorporated into this +Agreement constitute the entire agreement of the parties with +respect to the subject matter of this Agreement and supersede +all prior negotiations or documentation exchanged between the +parties relating to this SDK license. Any additional and/or +conflicting terms on documents issued by you are null, void, +and invalid. Any amendment or waiver under this Agreement +shall be in writing and signed by representatives of both +parties. + + +2. 
CUDA Toolkit Supplement to Software License Agreement for +NVIDIA Software Development Kits +------------------------------------------------------------ + + +Release date: August 16, 2018 +----------------------------- + +The terms in this supplement govern your use of the NVIDIA +CUDA Toolkit SDK under the terms of your license agreement +(“Agreement”) as modified by this supplement. Capitalized +terms used but not defined below have the meaning assigned to +them in the Agreement. + +This supplement is an exhibit to the Agreement and is +incorporated as an integral part of the Agreement. In the +event of conflict between the terms in this supplement and the +terms in the Agreement, the terms in this supplement govern. + + +2.1. License Scope + +The SDK is licensed for you to develop applications only for +use in systems with NVIDIA GPUs. + + +2.2. Distribution + +The portions of the SDK that are distributable under the +Agreement are listed in Attachment A. + + +2.3. Operating Systems + +Those portions of the SDK designed exclusively for use on the +Linux or FreeBSD operating systems, or other operating systems +derived from the source code to these operating systems, may +be copied and redistributed for use in accordance with this +Agreement, provided that the object code files are not +modified in any way (except for unzipping of compressed +files). + + +2.4. Audio and Video Encoders and Decoders + +You acknowledge and agree that it is your sole responsibility +to obtain any additional third-party licenses required to +make, have made, use, have used, sell, import, and offer for +sale your products or services that include or incorporate any +third-party software and content relating to audio and/or +video encoders and decoders from, including but not limited +to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A., +MPEG-LA, and Coding Technologies. 
NVIDIA does not grant to you +under this Agreement any necessary patent or other rights with +respect to any audio and/or video encoders and decoders. + + +2.5. Licensing + +If the distribution terms in this Agreement are not suitable +for your organization, or for any questions regarding this +Agreement, please contact NVIDIA at +nvidia-compute-license-questions@nvidia.com. + + +2.6. Attachment A + +The following portions of the SDK are distributable under the +Agreement: + +Component + +CUDA Runtime + +Windows + +cudart.dll, cudart_static.lib, cudadevrt.lib + +Mac OSX + +libcudart.dylib, libcudart_static.a, libcudadevrt.a + +Linux + +libcudart.so, libcudart_static.a, libcudadevrt.a + +Android + +libcudart.so, libcudart_static.a, libcudadevrt.a + +Component + +CUDA FFT Library + +Windows + +cufft.dll, cufftw.dll, cufft.lib, cufftw.lib + +Mac OSX + +libcufft.dylib, libcufft_static.a, libcufftw.dylib, +libcufftw_static.a + +Linux + +libcufft.so, libcufft_static.a, libcufftw.so, +libcufftw_static.a + +Android + +libcufft.so, libcufft_static.a, libcufftw.so, +libcufftw_static.a + +Component + +CUDA BLAS Library + +Windows + +cublas.dll, cublasLt.dll + +Mac OSX + +libcublas.dylib, libcublasLt.dylib, libcublas_static.a, +libcublasLt_static.a + +Linux + +libcublas.so, libcublasLt.so, libcublas_static.a, +libcublasLt_static.a + +Android + +libcublas.so, libcublasLt.so, libcublas_static.a, +libcublasLt_static.a + +Component + +NVIDIA "Drop-in" BLAS Library + +Windows + +nvblas.dll + +Mac OSX + +libnvblas.dylib + +Linux + +libnvblas.so + +Component + +CUDA Sparse Matrix Library + +Windows + +cusparse.dll, cusparse.lib + +Mac OSX + +libcusparse.dylib, libcusparse_static.a + +Linux + +libcusparse.so, libcusparse_static.a + +Android + +libcusparse.so, libcusparse_static.a + +Component + +CUDA Linear Solver Library + +Windows + +cusolver.dll, cusolver.lib + +Mac OSX + +libcusolver.dylib, libcusolver_static.a + +Linux + +libcusolver.so, libcusolver_static.a + +Android + 
+libcusolver.so, libcusolver_static.a + +Component + +CUDA Random Number Generation Library + +Windows + +curand.dll, curand.lib + +Mac OSX + +libcurand.dylib, libcurand_static.a + +Linux + +libcurand.so, libcurand_static.a + +Android + +libcurand.so, libcurand_static.a + +Component + +CUDA Accelerated Graph Library + +Component + +NVIDIA Performance Primitives Library + +Windows + +nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll, +nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll, +nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib, +nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll, +nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib + +Mac OSX + +libnppc.dylib, libnppc_static.a, libnppial.dylib, +libnppial_static.a, libnppicc.dylib, libnppicc_static.a, +libnppicom.dylib, libnppicom_static.a, libnppidei.dylib, +libnppidei_static.a, libnppif.dylib, libnppif_static.a, +libnppig.dylib, libnppig_static.a, libnppim.dylib, +libnppisu_static.a, libnppitc.dylib, libnppitc_static.a, +libnpps.dylib, libnpps_static.a + +Linux + +libnppc.so, libnppc_static.a, libnppial.so, +libnppial_static.a, libnppicc.so, libnppicc_static.a, +libnppicom.so, libnppicom_static.a, libnppidei.so, +libnppidei_static.a, libnppif.so, libnppif_static.a +libnppig.so, libnppig_static.a, libnppim.so, +libnppim_static.a, libnppist.so, libnppist_static.a, +libnppisu.so, libnppisu_static.a, libnppitc.so +libnppitc_static.a, libnpps.so, libnpps_static.a + +Android + +libnppc.so, libnppc_static.a, libnppial.so, +libnppial_static.a, libnppicc.so, libnppicc_static.a, +libnppicom.so, libnppicom_static.a, libnppidei.so, +libnppidei_static.a, libnppif.so, libnppif_static.a +libnppig.so, libnppig_static.a, libnppim.so, +libnppim_static.a, libnppist.so, libnppist_static.a, +libnppisu.so, libnppisu_static.a, libnppitc.so +libnppitc_static.a, libnpps.so, libnpps_static.a + +Component + +NVIDIA JPEG Library + +Linux + +libnvjpeg.so, libnvjpeg_static.a + +Component + +Internal common library 
required for statically linking to +cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP + +Mac OSX + +libculibos.a + +Linux + +libculibos.a + +Component + +NVIDIA Runtime Compilation Library and Header + +All + +nvrtc.h + +Windows + +nvrtc.dll, nvrtc-builtins.dll + +Mac OSX + +libnvrtc.dylib, libnvrtc-builtins.dylib + +Linux + +libnvrtc.so, libnvrtc-builtins.so + +Component + +NVIDIA Optimizing Compiler Library + +Windows + +nvvm.dll + +Mac OSX + +libnvvm.dylib + +Linux + +libnvvm.so + +Component + +NVIDIA Common Device Math Functions Library + +Windows + +libdevice.10.bc + +Mac OSX + +libdevice.10.bc + +Linux + +libdevice.10.bc + +Component + +CUDA Occupancy Calculation Header Library + +All + +cuda_occupancy.h + +Component + +CUDA Half Precision Headers + +All + +cuda_fp16.h, cuda_fp16.hpp + +Component + +CUDA Profiling Tools Interface (CUPTI) Library + +Windows + +cupti.dll + +Mac OSX + +libcupti.dylib + +Linux + +libcupti.so + +Component + +NVIDIA Tools Extension Library + +Windows + +nvToolsExt.dll, nvToolsExt.lib + +Mac OSX + +libnvToolsExt.dylib + +Linux + +libnvToolsExt.so + +Component + +NVIDIA CUDA Driver Libraries + +Linux + +libcuda.so, libnvidia-fatbinaryloader.so, +libnvidia-ptxjitcompiler.so + +The NVIDIA CUDA Driver Libraries are only distributable in +applications that meet this criteria: + + 1. The application was developed starting from a NVIDIA CUDA + container obtained from Docker Hub or the NVIDIA GPU + Cloud, and + + 2. The resulting application is packaged as a Docker + container and distributed to users on Docker Hub or the + NVIDIA GPU Cloud only. + + +2.7. Attachment B + + +Additional Licensing Obligations + +The following third party components included in the SOFTWARE +are licensed to Licensee pursuant to the following terms and +conditions: + + 1. 
Licensee's use of the GDB third party component is + subject to the terms and conditions of GNU GPL v3: + + This product includes copyrighted third-party software licensed + under the terms of the GNU General Public License v3 ("GPL v3"). + All third-party software packages are copyright by their respective + authors. GPL v3 terms and conditions are hereby incorporated into + the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt + + Consistent with these licensing requirements, the software + listed below is provided under the terms of the specified + open source software licenses. To obtain source code for + software provided under licenses that require + redistribution of source code, including the GNU General + Public License (GPL) and GNU Lesser General Public License + (LGPL), contact oss-requests@nvidia.com. This offer is + valid for a period of three (3) years from the date of the + distribution of this product by NVIDIA CORPORATION. + + Component License + CUDA-GDB GPL v3 + + 2. Licensee represents and warrants that any and all third + party licensing and/or royalty payment obligations in + connection with Licensee's use of the H.264 video codecs + are solely the responsibility of Licensee. + + 3. Licensee's use of the Thrust library is subject to the + terms and conditions of the Apache License Version 2.0. + All third-party software packages are copyright by their + respective authors. Apache License Version 2.0 terms and + conditions are hereby incorporated into the Agreement by + this reference. + http://www.apache.org/licenses/LICENSE-2.0.html + + In addition, Licensee acknowledges the following notice: + Thrust includes source code from the Boost Iterator, + Tuple, System, and Random Number libraries. + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . 
+ + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 4. Licensee's use of the LLVM third party component is + subject to the following terms and conditions: + + ====================================================== + LLVM Release License + ====================================================== + University of Illinois/NCSA + Open Source License + + Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign. + All rights reserved. 
+ + Developed by: + + LLVM Team + + University of Illinois at Urbana-Champaign + + http://llvm.org + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal with the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimers. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimers in the + documentation and/or other materials provided with the distribution. + + * Neither the names of the LLVM Team, University of Illinois at Urbana- + Champaign, nor the names of its contributors may be used to endorse or + promote products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS WITH THE SOFTWARE. + + 5. Licensee's use (e.g. nvprof) of the PCRE third party + component is subject to the following terms and + conditions: + + ------------ + PCRE LICENCE + ------------ + PCRE is a library of functions to support regular expressions whose syntax + and semantics are as close as possible to those of the Perl 5 language. 
+ Release 8 of PCRE is distributed under the terms of the "BSD" licence, as + specified below. The documentation for PCRE, supplied in the "doc" + directory, is distributed under the same terms as the software itself. The + basic library functions are written in C and are freestanding. Also + included in the distribution is a set of C++ wrapper functions, and a just- + in-time compiler that can be used to optimize pattern matching. These are + both optional features that can be omitted when the library is built. + + THE BASIC LIBRARY FUNCTIONS + --------------------------- + Written by: Philip Hazel + Email local part: ph10 + Email domain: cam.ac.uk + University of Cambridge Computing Service, + Cambridge, England. + Copyright (c) 1997-2012 University of Cambridge + All rights reserved. + + PCRE JUST-IN-TIME COMPILATION SUPPORT + ------------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Emain domain: freemail.hu + Copyright(c) 2010-2012 Zoltan Herczeg + All rights reserved. + + STACK-LESS JUST-IN-TIME COMPILER + -------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Emain domain: freemail.hu + Copyright(c) 2009-2012 Zoltan Herczeg + All rights reserved. + + THE C++ WRAPPER FUNCTIONS + ------------------------- + Contributed by: Google Inc. + Copyright (c) 2007-2012, Google Inc. + All rights reserved. + + THE "BSD" LICENCE + ----------------- + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 6. Some of the cuBLAS library routines were written by or + derived from code written by Vasily Volkov and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2007-2009, Regents of the University of California + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. 
+ * Neither the name of the University of California, Berkeley nor + the names of its contributors may be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 7. Some of the cuBLAS library routines were written by or + derived from code written by Davide Barbieri and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * The name of the author may not be used to endorse or promote + products derived from this software without specific prior + written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 8. Some of the cuBLAS library routines were derived from + code developed by the University of Tennessee and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2010 The University of Tennessee. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer listed in this license in the documentation and/or + other materials provided with the distribution. + * Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 9. Some of the cuBLAS library routines were written by or + derived from code written by Jonathan Hogg and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2012, The Science and Technology Facilities Council (STFC). + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the STFC nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE STFC BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 10. Some of the cuBLAS library routines were written by or + derived from code written by Ahmad M. Abdelfattah, David + Keyes, and Hatem Ltaief, and are subject to the Apache + License, Version 2.0, as follows: + + -- (C) Copyright 2013 King Abdullah University of Science and Technology + Authors: + Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa) + David Keyes (david.keyes@kaust.edu.sa) + Hatem Ltaief (hatem.ltaief@kaust.edu.sa) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the King Abdullah University of Science and + Technology nor the names of its contributors may be used to endorse + or promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE + + 11. Some of the cuSPARSE library routines were written by or + derived from code written by Li-Wen Chang and are subject + to the NCSA Open Source License as follows: + + Copyright (c) 2012, University of Illinois. + + All rights reserved. + + Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal with the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimers in the documentation and/or other materials provided + with the distribution. + * Neither the names of IMPACT Group, University of Illinois, nor + the names of its contributors may be used to endorse or promote + products derived from this Software without specific prior + written permission. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE + SOFTWARE. + + 12. Some of the cuRAND library routines were written by or + derived from code written by Mutsuo Saito and Makoto + Matsumoto and are subject to the following license: + + Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima + University. All rights reserved. + + Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima + University and University of Tokyo. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the Hiroshima University nor the names of + its contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 13. Some of the cuRAND library routines were derived from + code developed by D. E. Shaw Research and are subject to + the following license: + + Copyright 2010-2011, D. E. Shaw Research. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions, and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions, and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of D. E. Shaw Research nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 14. Some of the Math library routines were written by or + derived from code developed by Norbert Juffa and are + subject to the following license: + + Copyright (c) 2015-2017, Norbert Juffa + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 15. Licensee's use of the lz4 third party component is + subject to the following terms and conditions: + + Copyright (C) 2011-2013, Yann Collet. + BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 16. The NPP library uses code from the Boost Math Toolkit, + and is subject to the following license: + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 17. Portions of the Nsight Eclipse Edition is subject to the + following license: + + The Eclipse Foundation makes available all content in this plug-in + ("Content"). Unless otherwise indicated below, the Content is provided + to you under the terms and conditions of the Eclipse Public License + Version 1.0 ("EPL"). A copy of the EPL is available at http:// + www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program" + will mean the Content. + + If you did not receive this Content directly from the Eclipse + Foundation, the Content is being redistributed by another party + ("Redistributor") and different terms and conditions may apply to your + use of any object code in the Content. Check the Redistributor's + license that was provided with the Content. If no such license exists, + contact the Redistributor. Unless otherwise indicated below, the terms + and conditions of the EPL still apply to any source code in the + Content and such source code may be obtained at http://www.eclipse.org. + + 18. Some of the cuBLAS library routines uses code from + OpenAI, which is subject to the following license: + + License URL + https://github.com/openai/openai-gemm/blob/master/LICENSE + + License Text + The MIT License + + Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + 19. Licensee's use of the Visual Studio Setup Configuration + Samples is subject to the following license: + + The MIT License (MIT) + Copyright (C) Microsoft Corporation. All rights reserved. + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of the Software, + and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + 20. Licensee's use of linmath.h header for CPU functions for + GL vector/matrix operations from lunarG is subject to the + Apache License Version 2.0. + + 21. The DX12-CUDA sample uses the d3dx12.h header, which is + subject to the MIT license . + +----------------- diff --git a/parrot/lib/python3.10/site-packages/pyarrow/_acero.cpython-310-x86_64-linux-gnu.so b/parrot/lib/python3.10/site-packages/pyarrow/_acero.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..f923a3f415dc561ae337651a0a7840a8accb1296 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/pyarrow/_acero.cpython-310-x86_64-linux-gnu.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7385b62f5ba4394196d3d96f1f080acd60ddce334f9122ca0b4a9509d7501d2b +size 320624 diff --git a/parrot/lib/python3.10/site-packages/pyarrow/tests/data/parquet/v0.7.1.all-named-index.parquet b/parrot/lib/python3.10/site-packages/pyarrow/tests/data/parquet/v0.7.1.all-named-index.parquet new file mode 100644 index 0000000000000000000000000000000000000000..9ac3cea6476f0799521059c755ca806751ded8ab --- /dev/null +++ b/parrot/lib/python3.10/site-packages/pyarrow/tests/data/parquet/v0.7.1.all-named-index.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:60f1945edc3e4ec38f6e234389e647a1b369de8afb9c7840c491a39880c0caa1 +size 3948 diff --git a/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/INSTALLER 
b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/LICENSE b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..3a97119010ac82e15e917a69b7b8f9f59b5a4601 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2014, Saurabh Kumar (python-dotenv), 2013, Ted Tieken (django-dotenv-rw), 2013, Jacob Kaplan-Moss (django-dotenv) + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +- Neither the name of django-dotenv nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/METADATA b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..b9af7fe6e6afdfed87afb5bfa9b862fd8c0aa79d --- /dev/null +++ b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/METADATA @@ -0,0 +1,692 @@ +Metadata-Version: 2.1 +Name: python-dotenv +Version: 1.0.1 +Summary: Read key-value pairs from a .env file and set them as environment variables +Home-page: https://github.com/theskumar/python-dotenv +Author: Saurabh Kumar +Author-email: me+github@saurabh-kumar.com +License: BSD-3-Clause +Keywords: environment variables,deployments,settings,env,dotenv,configurations,python +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Topic :: System :: 
Systems Administration +Classifier: Topic :: Utilities +Classifier: Environment :: Web Environment +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: cli +Requires-Dist: click >=5.0 ; extra == 'cli' + +# python-dotenv + +[![Build Status][build_status_badge]][build_status_link] +[![PyPI version][pypi_badge]][pypi_link] + +Python-dotenv reads key-value pairs from a `.env` file and can set them as environment +variables. It helps in the development of applications following the +[12-factor](https://12factor.net/) principles. + +- [Getting Started](#getting-started) +- [Other Use Cases](#other-use-cases) + * [Load configuration without altering the environment](#load-configuration-without-altering-the-environment) + * [Parse configuration as a stream](#parse-configuration-as-a-stream) + * [Load .env files in IPython](#load-env-files-in-ipython) +- [Command-line Interface](#command-line-interface) +- [File format](#file-format) + * [Multiline values](#multiline-values) + * [Variable expansion](#variable-expansion) +- [Related Projects](#related-projects) +- [Acknowledgements](#acknowledgements) + +## Getting Started + +```shell +pip install python-dotenv +``` + +If your application takes its configuration from environment variables, like a 12-factor +application, launching it in development is not very practical because you have to set +those environment variables yourself. + +To help you with that, you can add Python-dotenv to your application to make it load the +configuration from a `.env` file when it is present (e.g. in development) while remaining +configurable via the environment: + +```python +from dotenv import load_dotenv + +load_dotenv() # take environment variables from .env. + +# Code of your application, which uses environment variables (e.g. from `os.environ` or +# `os.getenv`) as if they came from the actual environment. 
+``` + +By default, `load_dotenv` doesn't override existing environment variables. + +To configure the development environment, add a `.env` in the root directory of your +project: + +``` +. +├── .env +└── foo.py +``` + +The syntax of `.env` files supported by python-dotenv is similar to that of Bash: + +```bash +# Development settings +DOMAIN=example.org +ADMIN_EMAIL=admin@${DOMAIN} +ROOT_URL=${DOMAIN}/app +``` + +If you use variables in values, ensure they are surrounded with `{` and `}`, like +`${DOMAIN}`, as bare variables such as `$DOMAIN` are not expanded. + +You will probably want to add `.env` to your `.gitignore`, especially if it contains +secrets like a password. + +See the section "File format" below for more information about what you can write in a +`.env` file. + +## Other Use Cases + +### Load configuration without altering the environment + +The function `dotenv_values` works more or less the same way as `load_dotenv`, except it +doesn't touch the environment, it just returns a `dict` with the values parsed from the +`.env` file. + +```python +from dotenv import dotenv_values + +config = dotenv_values(".env") # config = {"USER": "foo", "EMAIL": "foo@example.org"} +``` + +This notably enables advanced configuration management: + +```python +import os +from dotenv import dotenv_values + +config = { + **dotenv_values(".env.shared"), # load shared development variables + **dotenv_values(".env.secret"), # load sensitive variables + **os.environ, # override loaded values with environment variables +} +``` + +### Parse configuration as a stream + +`load_dotenv` and `dotenv_values` accept [streams][python_streams] via their `stream` +argument. It is thus possible to load the variables from sources other than the +filesystem (e.g. the network). 
+ +```python +from io import StringIO + +from dotenv import load_dotenv + +config = StringIO("USER=foo\nEMAIL=foo@example.org") +load_dotenv(stream=config) +``` + +### Load .env files in IPython + +You can use dotenv in IPython. By default, it will use `find_dotenv` to search for a +`.env` file: + +```python +%load_ext dotenv +%dotenv +``` + +You can also specify a path: + +```python +%dotenv relative/or/absolute/path/to/.env +``` + +Optional flags: + +- `-o` to override existing variables. +- `-v` for increased verbosity. + +## Command-line Interface + +A CLI interface `dotenv` is also included, which helps you manipulate the `.env` file +without manually opening it. + +```shell +$ pip install "python-dotenv[cli]" +$ dotenv set USER foo +$ dotenv set EMAIL foo@example.org +$ dotenv list +USER=foo +EMAIL=foo@example.org +$ dotenv list --format=json +{ + "USER": "foo", + "EMAIL": "foo@example.org" +} +$ dotenv run -- python foo.py +``` + +Run `dotenv --help` for more information about the options and subcommands. + +## File format + +The format is not formally specified and still improves over time. That being said, +`.env` files should mostly look like Bash files. + +Keys can be unquoted or single-quoted. Values can be unquoted, single- or double-quoted. +Spaces before and after keys, equal signs, and values are ignored. Values can be followed +by a comment. Lines can start with the `export` directive, which does not affect their +interpretation. + +Allowed escape sequences: + +- in single-quoted values: `\\`, `\'` +- in double-quoted values: `\\`, `\'`, `\"`, `\a`, `\b`, `\f`, `\n`, `\r`, `\t`, `\v` + +### Multiline values + +It is possible for single- or double-quoted values to span multiple lines. 
The following +examples are equivalent: + +```bash +FOO="first line +second line" +``` + +```bash +FOO="first line\nsecond line" +``` + +### Variable without a value + +A variable can have no value: + +```bash +FOO +``` + +It results in `dotenv_values` associating that variable name with the value `None` (e.g. +`{"FOO": None}`. `load_dotenv`, on the other hand, simply ignores such variables. + +This shouldn't be confused with `FOO=`, in which case the variable is associated with the +empty string. + +### Variable expansion + +Python-dotenv can interpolate variables using POSIX variable expansion. + +With `load_dotenv(override=True)` or `dotenv_values()`, the value of a variable is the +first of the values defined in the following list: + +- Value of that variable in the `.env` file. +- Value of that variable in the environment. +- Default value, if provided. +- Empty string. + +With `load_dotenv(override=False)`, the value of a variable is the first of the values +defined in the following list: + +- Value of that variable in the environment. +- Value of that variable in the `.env` file. +- Default value, if provided. +- Empty string. + +## Related Projects + +- [Honcho](https://github.com/nickstenning/honcho) - For managing + Procfile-based applications. 
+- [django-dotenv](https://github.com/jpadilla/django-dotenv) +- [django-environ](https://github.com/joke2k/django-environ) +- [django-environ-2](https://github.com/sergeyklay/django-environ-2) +- [django-configuration](https://github.com/jezdez/django-configurations) +- [dump-env](https://github.com/sobolevn/dump-env) +- [environs](https://github.com/sloria/environs) +- [dynaconf](https://github.com/rochacbruno/dynaconf) +- [parse_it](https://github.com/naorlivne/parse_it) +- [python-decouple](https://github.com/HBNetwork/python-decouple) + +## Acknowledgements + +This project is currently maintained by [Saurabh Kumar](https://saurabh-kumar.com) and +[Bertrand Bonnefoy-Claudet](https://github.com/bbc2) and would not have been possible +without the support of these [awesome +people](https://github.com/theskumar/python-dotenv/graphs/contributors). + +[build_status_badge]: https://github.com/theskumar/python-dotenv/actions/workflows/test.yml/badge.svg +[build_status_link]: https://github.com/theskumar/python-dotenv/actions/workflows/test.yml +[pypi_badge]: https://badge.fury.io/py/python-dotenv.svg +[pypi_link]: https://badge.fury.io/py/python-dotenv +[python_streams]: https://docs.python.org/3/library/io.html + +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this +project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [1.0.1] - 2024-01-23 + +**Fixed** + +* Gracefully handle code which has been imported from a zipfile ([#456] by [@samwyma]) +* Allow modules using load_dotenv to be reloaded when launched in a separate thread ([#497] by [@freddyaboulton]) +* Fix file not closed after deletion, handle error in the rewrite function ([#469] by [@Qwerty-133]) + +**Misc** +* Use pathlib.Path in tests ([#466] by [@eumiro]) +* Fix year in release date in changelog.md ([#454] by [@jankislinger]) +* Use https in README links ([#474] by [@Nicals]) + +## [1.0.0] - 2023-02-24 + +**Fixed** + +* Drop support for python 3.7, add python 3.12-dev (#449 by [@theskumar]) +* Handle situations where the cwd does not exist. (#446 by [@jctanner]) + +## [0.21.1] - 2023-01-21 + +**Added** + +* Use Python 3.11 non-beta in CI (#438 by [@bbc2]) +* Modernize variables code (#434 by [@Nougat-Waffle]) +* Modernize main.py and parser.py code (#435 by [@Nougat-Waffle]) +* Improve conciseness of cli.py and __init__.py (#439 by [@Nougat-Waffle]) +* Improve error message for `get` and `list` commands when env file can't be opened (#441 by [@bbc2]) +* Updated License to align with BSD OSI template (#433 by [@lsmith77]) + + +**Fixed** + +* Fix Out-of-scope error when "dest" variable is undefined (#413 by [@theGOTOguy]) +* Fix IPython test warning about deprecated `magic` (#440 by [@bbc2]) +* Fix type hint for dotenv_path var, add StrPath alias (#432 by [@eaf]) + +## [0.21.0] - 2022-09-03 + +**Added** + +* CLI: add support for invocations via 'python -m'. (#395 by [@theskumar]) +* `load_dotenv` function now returns `False`. (#388 by [@larsks]) +* CLI: add --format= option to list command. (#407 by [@sammck]) + +**Fixed** + +* Drop Python 3.5 and 3.6 and upgrade GA (#393 by [@eggplants]) +* Use `open` instead of `io.open`. 
(#389 by [@rabinadk1]) +* Improve documentation for variables without a value (#390 by [@bbc2]) +* Add `parse_it` to Related Projects (#410 by [@naorlivne]) +* Update README.md (#415 by [@harveer07]) +* Improve documentation with direct use of MkDocs (#398 by [@bbc2]) + +## [0.20.0] - 2022-03-24 + +**Added** + +- Add `encoding` (`Optional[str]`) parameter to `get_key`, `set_key` and `unset_key`. + (#379 by [@bbc2]) + +**Fixed** + +- Use dict to specify the `entry_points` parameter of `setuptools.setup` (#376 by + [@mgorny]). +- Don't build universal wheels (#387 by [@bbc2]). + +## [0.19.2] - 2021-11-11 + +**Fixed** + +- In `set_key`, add missing newline character before new entry if necessary. (#361 by + [@bbc2]) + +## [0.19.1] - 2021-08-09 + +**Added** + +- Add support for Python 3.10. (#359 by [@theskumar]) + +## [0.19.0] - 2021-07-24 + +**Changed** + +- Require Python 3.5 or a later version. Python 2 and 3.4 are no longer supported. (#341 + by [@bbc2]). + +**Added** + +- The `dotenv_path` argument of `set_key` and `unset_key` now has a type of `Union[str, + os.PathLike]` instead of just `os.PathLike` (#347 by [@bbc2]). +- The `stream` argument of `load_dotenv` and `dotenv_values` can now be a text stream + (`IO[str]`), which includes values like `io.StringIO("foo")` and `open("file.env", + "r")` (#348 by [@bbc2]). + +## [0.18.0] - 2021-06-20 + +**Changed** + +- Raise `ValueError` if `quote_mode` isn't one of `always`, `auto` or `never` in + `set_key` (#330 by [@bbc2]). +- When writing a value to a .env file with `set_key` or `dotenv set ` (#330 + by [@bbc2]): + - Use single quotes instead of double quotes. + - Don't strip surrounding quotes. + - In `auto` mode, don't add quotes if the value is only made of alphanumeric characters + (as determined by `string.isalnum`). + +## [0.17.1] - 2021-04-29 + +**Fixed** + +- Fixed tests for build environments relying on `PYTHONPATH` (#318 by [@befeleme]). 
+ +## [0.17.0] - 2021-04-02 + +**Changed** + +- Make `dotenv get ` only show the value, not `key=value` (#313 by [@bbc2]). + +**Added** + +- Add `--override`/`--no-override` option to `dotenv run` (#312 by [@zueve] and [@bbc2]). + +## [0.16.0] - 2021-03-27 + +**Changed** + +- The default value of the `encoding` parameter for `load_dotenv` and `dotenv_values` is + now `"utf-8"` instead of `None` (#306 by [@bbc2]). +- Fix resolution order in variable expansion with `override=False` (#287 by [@bbc2]). + +## [0.15.0] - 2020-10-28 + +**Added** + +- Add `--export` option to `set` to make it prepend the binding with `export` (#270 by + [@jadutter]). + +**Changed** + +- Make `set` command create the `.env` file in the current directory if no `.env` file was + found (#270 by [@jadutter]). + +**Fixed** + +- Fix potentially empty expanded value for duplicate key (#260 by [@bbc2]). +- Fix import error on Python 3.5.0 and 3.5.1 (#267 by [@gongqingkui]). +- Fix parsing of unquoted values containing several adjacent space or tab characters + (#277 by [@bbc2], review by [@x-yuri]). + +## [0.14.0] - 2020-07-03 + +**Changed** + +- Privilege definition in file over the environment in variable expansion (#256 by + [@elbehery95]). + +**Fixed** + +- Improve error message for when file isn't found (#245 by [@snobu]). +- Use HTTPS URL in package meta data (#251 by [@ekohl]). + +## [0.13.0] - 2020-04-16 + +**Added** + +- Add support for a Bash-like default value in variable expansion (#248 by [@bbc2]). + +## [0.12.0] - 2020-02-28 + +**Changed** + +- Use current working directory to find `.env` when bundled by PyInstaller (#213 by + [@gergelyk]). + +**Fixed** + +- Fix escaping of quoted values written by `set_key` (#236 by [@bbc2]). +- Fix `dotenv run` crashing on environment variables without values (#237 by [@yannham]). +- Remove warning when last line is empty (#238 by [@bbc2]). 
+ +## [0.11.0] - 2020-02-07 + +**Added** + +- Add `interpolate` argument to `load_dotenv` and `dotenv_values` to disable interpolation + (#232 by [@ulyssessouza]). + +**Changed** + +- Use logging instead of warnings (#231 by [@bbc2]). + +**Fixed** + +- Fix installation in non-UTF-8 environments (#225 by [@altendky]). +- Fix PyPI classifiers (#228 by [@bbc2]). + +## [0.10.5] - 2020-01-19 + +**Fixed** + +- Fix handling of malformed lines and lines without a value (#222 by [@bbc2]): + - Don't print warning when key has no value. + - Reject more malformed lines (e.g. "A: B", "a='b',c"). +- Fix handling of lines with just a comment (#224 by [@bbc2]). + +## [0.10.4] - 2020-01-17 + +**Added** + +- Make typing optional (#179 by [@techalchemy]). +- Print a warning on malformed line (#211 by [@bbc2]). +- Support keys without a value (#220 by [@ulyssessouza]). + +## 0.10.3 + +- Improve interactive mode detection ([@andrewsmith])([#183]). +- Refactor parser to fix parsing inconsistencies ([@bbc2])([#170]). + - Interpret escapes as control characters only in double-quoted strings. + - Interpret `#` as start of comment only if preceded by whitespace. + +## 0.10.2 + +- Add type hints and expose them to users ([@qnighy])([#172]) +- `load_dotenv` and `dotenv_values` now accept an `encoding` parameter, defaults to `None` + ([@theskumar])([@earlbread])([#161]) +- Fix `str`/`unicode` inconsistency in Python 2: values are always `str` now. ([@bbc2])([#121]) +- Fix Unicode error in Python 2, introduced in 0.10.0. 
([@bbc2])([#176]) + +## 0.10.1 +- Fix parsing of variable without a value ([@asyncee])([@bbc2])([#158]) + +## 0.10.0 + +- Add support for UTF-8 in unquoted values ([@bbc2])([#148]) +- Add support for trailing comments ([@bbc2])([#148]) +- Add backslashes support in values ([@bbc2])([#148]) +- Add support for newlines in values ([@bbc2])([#148]) +- Force environment variables to str with Python2 on Windows ([@greyli]) +- Drop Python 3.3 support ([@greyli]) +- Fix stderr/-out/-in redirection ([@venthur]) + + +## 0.9.0 + +- Add `--version` parameter to cli ([@venthur]) +- Enable loading from current directory ([@cjauvin]) +- Add 'dotenv run' command for calling arbitrary shell script with .env ([@venthur]) + +## 0.8.1 + +- Add tests for docs ([@Flimm]) +- Make 'cli' support optional. Use `pip install python-dotenv[cli]`. ([@theskumar]) + +## 0.8.0 + +- `set_key` and `unset_key` only modified the affected file instead of + parsing and re-writing file, this causes comments and other file + entact as it is. +- Add support for `export` prefix in the line. +- Internal refractoring ([@theskumar]) +- Allow `load_dotenv` and `dotenv_values` to work with `StringIO())` ([@alanjds])([@theskumar])([#78]) + +## 0.7.1 + +- Remove hard dependency on iPython ([@theskumar]) + +## 0.7.0 + +- Add support to override system environment variable via .env. + ([@milonimrod](https://github.com/milonimrod)) + ([\#63](https://github.com/theskumar/python-dotenv/issues/63)) +- Disable ".env not found" warning by default + ([@maxkoryukov](https://github.com/maxkoryukov)) + ([\#57](https://github.com/theskumar/python-dotenv/issues/57)) + +## 0.6.5 + +- Add support for special characters `\`. 
+ ([@pjona](https://github.com/pjona)) + ([\#60](https://github.com/theskumar/python-dotenv/issues/60)) + +## 0.6.4 + +- Fix issue with single quotes ([@Flimm]) + ([\#52](https://github.com/theskumar/python-dotenv/issues/52)) + +## 0.6.3 + +- Handle unicode exception in setup.py + ([\#46](https://github.com/theskumar/python-dotenv/issues/46)) + +## 0.6.2 + +- Fix dotenv list command ([@ticosax](https://github.com/ticosax)) +- Add iPython Support + ([@tillahoffmann](https://github.com/tillahoffmann)) + +## 0.6.0 + +- Drop support for Python 2.6 +- Handle escaped characters and newlines in quoted values. (Thanks + [@iameugenejo](https://github.com/iameugenejo)) +- Remove any spaces around unquoted key/value. (Thanks + [@paulochf](https://github.com/paulochf)) +- Added POSIX variable expansion. (Thanks + [@hugochinchilla](https://github.com/hugochinchilla)) + +## 0.5.1 + +- Fix find\_dotenv - it now start search from the file where this + function is called from. + +## 0.5.0 + +- Add `find_dotenv` method that will try to find a `.env` file. + (Thanks [@isms](https://github.com/isms)) + +## 0.4.0 + +- cli: Added `-q/--quote` option to control the behaviour of quotes + around values in `.env`. (Thanks + [@hugochinchilla](https://github.com/hugochinchilla)). +- Improved test coverage. 
+ +[#78]: https://github.com/theskumar/python-dotenv/issues/78 +[#121]: https://github.com/theskumar/python-dotenv/issues/121 +[#148]: https://github.com/theskumar/python-dotenv/issues/148 +[#158]: https://github.com/theskumar/python-dotenv/issues/158 +[#170]: https://github.com/theskumar/python-dotenv/issues/170 +[#172]: https://github.com/theskumar/python-dotenv/issues/172 +[#176]: https://github.com/theskumar/python-dotenv/issues/176 +[#183]: https://github.com/theskumar/python-dotenv/issues/183 +[#359]: https://github.com/theskumar/python-dotenv/issues/359 +[#469]: https://github.com/theskumar/python-dotenv/issues/469 +[#456]: https://github.com/theskumar/python-dotenv/issues/456 +[#466]: https://github.com/theskumar/python-dotenv/issues/466 +[#454]: https://github.com/theskumar/python-dotenv/issues/454 +[#474]: https://github.com/theskumar/python-dotenv/issues/474 + +[@alanjds]: https://github.com/alanjds +[@altendky]: https://github.com/altendky +[@andrewsmith]: https://github.com/andrewsmith +[@asyncee]: https://github.com/asyncee +[@bbc2]: https://github.com/bbc2 +[@befeleme]: https://github.com/befeleme +[@cjauvin]: https://github.com/cjauvin +[@eaf]: https://github.com/eaf +[@earlbread]: https://github.com/earlbread +[@eggplants]: https://github.com/@eggplants +[@ekohl]: https://github.com/ekohl +[@elbehery95]: https://github.com/elbehery95 +[@eumiro]: https://github.com/eumiro +[@Flimm]: https://github.com/Flimm +[@freddyaboulton]: https://github.com/freddyaboulton +[@gergelyk]: https://github.com/gergelyk +[@gongqingkui]: https://github.com/gongqingkui +[@greyli]: https://github.com/greyli +[@harveer07]: https://github.com/@harveer07 +[@jadutter]: https://github.com/jadutter +[@jankislinger]: https://github.com/jankislinger +[@jctanner]: https://github.com/jctanner +[@larsks]: https://github.com/@larsks +[@lsmith77]: https://github.com/lsmith77 +[@mgorny]: https://github.com/mgorny +[@naorlivne]: https://github.com/@naorlivne +[@Nicals]: 
https://github.com/Nicals +[@Nougat-Waffle]: https://github.com/Nougat-Waffle +[@qnighy]: https://github.com/qnighy +[@Qwerty-133]: https://github.com/Qwerty-133 +[@rabinadk1]: https://github.com/@rabinadk1 +[@sammck]: https://github.com/@sammck +[@samwyma]: https://github.com/samwyma +[@snobu]: https://github.com/snobu +[@techalchemy]: https://github.com/techalchemy +[@theGOTOguy]: https://github.com/theGOTOguy +[@theskumar]: https://github.com/theskumar +[@ulyssessouza]: https://github.com/ulyssessouza +[@venthur]: https://github.com/venthur +[@x-yuri]: https://github.com/x-yuri +[@yannham]: https://github.com/yannham +[@zueve]: https://github.com/zueve + + +[Unreleased]: https://github.com/theskumar/python-dotenv/compare/v1.0.1...HEAD +[1.0.1]: https://github.com/theskumar/python-dotenv/compare/v1.0.0...v1.0.1 +[1.0.0]: https://github.com/theskumar/python-dotenv/compare/v0.21.0...v1.0.0 +[0.21.1]: https://github.com/theskumar/python-dotenv/compare/v0.21.0...v0.21.1 +[0.21.0]: https://github.com/theskumar/python-dotenv/compare/v0.20.0...v0.21.0 +[0.20.0]: https://github.com/theskumar/python-dotenv/compare/v0.19.2...v0.20.0 +[0.19.2]: https://github.com/theskumar/python-dotenv/compare/v0.19.1...v0.19.2 +[0.19.1]: https://github.com/theskumar/python-dotenv/compare/v0.19.0...v0.19.1 +[0.19.0]: https://github.com/theskumar/python-dotenv/compare/v0.18.0...v0.19.0 +[0.18.0]: https://github.com/theskumar/python-dotenv/compare/v0.17.1...v0.18.0 +[0.17.1]: https://github.com/theskumar/python-dotenv/compare/v0.17.0...v0.17.1 +[0.17.0]: https://github.com/theskumar/python-dotenv/compare/v0.16.0...v0.17.0 +[0.16.0]: https://github.com/theskumar/python-dotenv/compare/v0.15.0...v0.16.0 +[0.15.0]: https://github.com/theskumar/python-dotenv/compare/v0.14.0...v0.15.0 +[0.14.0]: https://github.com/theskumar/python-dotenv/compare/v0.13.0...v0.14.0 +[0.13.0]: https://github.com/theskumar/python-dotenv/compare/v0.12.0...v0.13.0 +[0.12.0]: 
https://github.com/theskumar/python-dotenv/compare/v0.11.0...v0.12.0 +[0.11.0]: https://github.com/theskumar/python-dotenv/compare/v0.10.5...v0.11.0 +[0.10.5]: https://github.com/theskumar/python-dotenv/compare/v0.10.4...v0.10.5 +[0.10.4]: https://github.com/theskumar/python-dotenv/compare/v0.10.3...v0.10.4 diff --git a/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/RECORD b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..2ca01ca6afa3d4fba0bb30b99e2e6f107e934188 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/RECORD @@ -0,0 +1,26 @@ +../../../bin/dotenv,sha256=mb_Jc2w6MgdMK6xP-odZEBo03FwJAMn19kJ5BYydn-0,224 +dotenv/__init__.py,sha256=WBU5SfSiKAhS3hzu17ykNuuwbuwyDCX91Szv4vUeOuM,1292 +dotenv/__main__.py,sha256=N0RhLG7nHIqtlJHwwepIo-zbJPNx9sewCCRGY528h_4,129 +dotenv/__pycache__/__init__.cpython-310.pyc,, +dotenv/__pycache__/__main__.cpython-310.pyc,, +dotenv/__pycache__/cli.cpython-310.pyc,, +dotenv/__pycache__/ipython.cpython-310.pyc,, +dotenv/__pycache__/main.cpython-310.pyc,, +dotenv/__pycache__/parser.cpython-310.pyc,, +dotenv/__pycache__/variables.cpython-310.pyc,, +dotenv/__pycache__/version.cpython-310.pyc,, +dotenv/cli.py,sha256=_ttQuR9Yl4k1PT53ByISkDjJ3kO_N_LzIDZzZ95uXEk,5809 +dotenv/ipython.py,sha256=avI6aez_RxnBptYgchIquF2TSgKI-GOhY3ppiu3VuWE,1303 +dotenv/main.py,sha256=GV7Ki6JYPDa-xy2ZXHKqER-bRvKa7qqh0G0OwffYJr8,12098 +dotenv/parser.py,sha256=QgU5HwMwM2wMqt0vz6dHTJ4nzPmwqRqvi4MSyeVifgU,5186 +dotenv/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26 +dotenv/variables.py,sha256=CD0qXOvvpB3q5RpBQMD9qX6vHX7SyW-SuiwGMFSlt08,2348 +dotenv/version.py,sha256=d4QHYmS_30j0hPN8NmNPnQ_Z0TphDRbu4MtQj9cT9e8,22 +python_dotenv-1.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +python_dotenv-1.0.1.dist-info/LICENSE,sha256=gGGbcEnwjIFoOtDgHwjyV6hAZS3XHugxRtNmWMfSwrk,1556 
+python_dotenv-1.0.1.dist-info/METADATA,sha256=fCkcTEUG3zknbuN1BK8e0PPCIgvPBLk-LneK0mRDM_s,23170 +python_dotenv-1.0.1.dist-info/RECORD,, +python_dotenv-1.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +python_dotenv-1.0.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +python_dotenv-1.0.1.dist-info/entry_points.txt,sha256=yRl1rCbswb1nQTQ_gZRlCw5QfabztUGnfGWLhlXFNdI,47 +python_dotenv-1.0.1.dist-info/top_level.txt,sha256=eyqUH4SHJNr6ahOYlxIunTr4XinE8Z5ajWLdrK3r0D8,7 diff --git a/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/REQUESTED b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/WHEEL b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..98c0d20b7a64f4f998d7913e1d38a05dba20916c --- /dev/null +++ b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/entry_points.txt b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..0a8682329417cc65dc220a0a7ec7a8efb1f221e4 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +dotenv = dotenv.__main__:cli diff --git a/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/top_level.txt b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/top_level.txt new file mode 100644 index 
0000000000000000000000000000000000000000..fe7c01aa90e2b2c3c1794c9e1c00aaa360b25358 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/python_dotenv-1.0.1.dist-info/top_level.txt @@ -0,0 +1 @@ +dotenv diff --git a/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/INSTALLER b/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/RECORD b/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..edb0a7af459f85455042b9f4f9c738475fe05617 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/RECORD @@ -0,0 +1,377 @@ +../../../bin/proton,sha256=emvZrMZGg6VXn9pFRUlu_gwlGL2BGJ8kp0RSuWjx3tw,233 +../../../bin/proton-viewer,sha256=TiGL54xqKPSkRt6ChAT8N59KL0-cbGjdjB_mhweNssY,233 +triton-3.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +triton-3.0.0.dist-info/METADATA,sha256=4tyc8sAbLgPPdRNNzGF51D5QjcO-c8LxtGdWHDKMnvE,1325 +triton-3.0.0.dist-info/RECORD,, +triton-3.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +triton-3.0.0.dist-info/WHEEL,sha256=fMEGhanlpGdC8mhEdSaDPDawYpkODE__oOu48u0N5vY,161 +triton-3.0.0.dist-info/entry_points.txt,sha256=SAiHYj5xxm1U5d8569PbMXmtWkKGNtiyy7LeTlUHalM,99 +triton-3.0.0.dist-info/top_level.txt,sha256=Hb_kDzJ7TsGI6NCIladkPkdkXovbkIWxpHevKN759pc,261 +triton/_C/libproton.so,sha256=Vrn4g8Ibi29h0v163G3_o8dOb3RQoIGJRkdvX-vVpDM,15424128 +triton/_C/libtriton.so,sha256=xPu23AtnHofUgcYD0Etf23fWGkAwxjcj_6obDJ-FieU,472232760 +triton/__init__.py,sha256=3tgO9HpoNxYuT2svdKtZyZHYm2INM30av_nlazmSXjs,1347 +triton/__pycache__/__init__.cpython-310.pyc,, 
+triton/__pycache__/errors.cpython-310.pyc,, +triton/__pycache__/testing.cpython-310.pyc,, +triton/backends/__init__.py,sha256=IqprKlbbTM6n-JCl6VeE5VU8kVOiaIP7UZSQPz8w4dw,1600 +triton/backends/__pycache__/__init__.cpython-310.pyc,, +triton/backends/__pycache__/compiler.cpython-310.pyc,, +triton/backends/__pycache__/driver.cpython-310.pyc,, +triton/backends/amd/__pycache__/compiler.cpython-310.pyc,, +triton/backends/amd/__pycache__/driver.cpython-310.pyc,, +triton/backends/amd/compiler.py,sha256=eVT-rZmK822lJwxbOW7tRdNrSb7FqfaL3PnoXDC5Qxw,11084 +triton/backends/amd/driver.c,sha256=-r1CUNk1RrMaUTIqO6iFjOW9MwLlZ_YRGR9LjIep93g,8397 +triton/backends/amd/driver.py,sha256=WZm05plz7mKqMkzs5msaP6ldSLrwSkqIoQ0_D2QVRCQ,15959 +triton/backends/amd/include/hip/amd_detail/amd_channel_descriptor.h,sha256=_2myGIdBTE0plFbGKOSx8HUqGZd0UBHo-YvKe2xkpbU,11708 +triton/backends/amd/include/hip/amd_detail/amd_device_functions.h,sha256=zfYTHJE_M_y2Y2ssP8ZH_EOczMBg4Iq2guglaKcI5js,31425 +triton/backends/amd/include/hip/amd_detail/amd_hip_atomic.h,sha256=PJRRTp83M0jIEBA_iWzfWwHZelSbL3TBrSDqlO3SQtk,49919 +triton/backends/amd/include/hip/amd_detail/amd_hip_bf16.h,sha256=fucv1_06JHVm82T0TmvERBbmtZTDQK6WJi_58oGQOXg,40634 +triton/backends/amd/include/hip/amd_detail/amd_hip_bfloat16.h,sha256=cFJlQEELGau_9geACeuiiFHyuAWCD6-VuSqcTnqajX0,9484 +triton/backends/amd/include/hip/amd_detail/amd_hip_common.h,sha256=dzkuIzuklqTRaNJjKLqfFEm6Fh4tK_FkTjYHFsZkmCI,1370 +triton/backends/amd/include/hip/amd_detail/amd_hip_complex.h,sha256=SEygl8X_MCXDVXxNIBm5Ds0eWwa-ojVXUUW48SIgsX8,5855 +triton/backends/amd/include/hip/amd_detail/amd_hip_cooperative_groups.h,sha256=SvrkniHiDGt-ztZRBvbkyajfUxTbGQzpZC1gnd4T-i8,31624 +triton/backends/amd/include/hip/amd_detail/amd_hip_fp16.h,sha256=86Nw97iaiC4QV5xBv8d3Bwc4FioMh5DQuCHj3sh_Yrw,57854 +triton/backends/amd/include/hip/amd_detail/amd_hip_gl_interop.h,sha256=9vxiV6rYRMGx12TPnrAVRvrfLyoRp74XRgKSPBPa2hk,3860 
+triton/backends/amd/include/hip/amd_detail/amd_hip_math_constants.h,sha256=u1fIaf-AiWF70ZA1zxVkUIbRqoJLu5lrfYbgt_usySk,5890 +triton/backends/amd/include/hip/amd_detail/amd_hip_runtime.h,sha256=ZvDsQ0AiZnJ178NuAsA7AuHrySXbN3aFs5Z9m2tsIDg,13954 +triton/backends/amd/include/hip/amd_detail/amd_hip_runtime_pt_api.h,sha256=fc4mtHBkWmiSRh8m-dxIxvu9zsweLTwEgohkntYcgJw,9997 +triton/backends/amd/include/hip/amd_detail/amd_hip_unsafe_atomics.h,sha256=w9nJ1S32GRl_ejDiGacteM6Zf84iovIifAzWX8Bze0Q,24202 +triton/backends/amd/include/hip/amd_detail/amd_hip_vector_types.h,sha256=qPdmRJnzlgtjVshkafoHxdHoMLkoYS9U-ZD-TjLznr0,57088 +triton/backends/amd/include/hip/amd_detail/amd_math_functions.h,sha256=46wiaEMStCczEsHtccgHlATfw_0O5j6Z8rlFkC7bmUA,3171 +triton/backends/amd/include/hip/amd_detail/amd_surface_functions.h,sha256=rsQuylNqmNhLb7PZjBz7WbruD_6YIXtOptY2BNJDxVU,11062 +triton/backends/amd/include/hip/amd_detail/amd_warp_functions.h,sha256=p8DdtuxqlgGHzKdVPMHDnZOD8zA5f6GjLHYMr0_FKjQ,18966 +triton/backends/amd/include/hip/amd_detail/concepts.hpp,sha256=7EOkpr2w2-jclUQ115yxtFCkBWJ7btUzhBOe-mR0N0M,1252 +triton/backends/amd/include/hip/amd_detail/device_library_decls.h,sha256=4clSpgf898UVjfZFVnDkcYi75A27crPsuFtLcs1s4KU,7457 +triton/backends/amd/include/hip/amd_detail/functional_grid_launch.hpp,sha256=u7hRB9kQXX575a5C7cV3gKow55DSBUCwO0dTjIswlag,8129 +triton/backends/amd/include/hip/amd_detail/grid_launch.h,sha256=tNS7CQw9gy-z930CElH3n6c5iMvpsQ_WFZK024mNzEo,1830 +triton/backends/amd/include/hip/amd_detail/grid_launch.hpp,sha256=EuAlM3olyrArebqwW5eSxo4gfjvWCGOAGAuLLmFttgw,1370 +triton/backends/amd/include/hip/amd_detail/grid_launch_GGL.hpp,sha256=KpQAuyy1Dyt45WcPaR_x-Ex-onPGEHA01DBbla7TT-k,1219 +triton/backends/amd/include/hip/amd_detail/helpers.hpp,sha256=hi2pW1mXQnbIwvmwWt_nG6A38sqLOd-QP5S9sETTs60,5707 +triton/backends/amd/include/hip/amd_detail/hip_api_trace.hpp,sha256=d01j4SFQP_6ALwUHByxznZV8SrQHbuujRYon8rxFw-I,94612 
+triton/backends/amd/include/hip/amd_detail/hip_assert.h,sha256=fNsG23KISuY-k5JFoX-5hZ7qGQScisXuHcdEwYlXOqw,3978 +triton/backends/amd/include/hip/amd_detail/hip_cooperative_groups_helper.h,sha256=tQ_XIvGKhvrj1h7gY-IVLmKvIPhsQa0YsBflxdhUHP8,7957 +triton/backends/amd/include/hip/amd_detail/hip_fp16_gcc.h,sha256=BtFsKmTptN4TOHocEicfNbBl2JCdZWKm_bd5mc5OzYY,6660 +triton/backends/amd/include/hip/amd_detail/hip_fp16_math_fwd.h,sha256=63tKWMPdW56qWlH_HbCaF_isVXufm514ol_SxL4YjTQ,5134 +triton/backends/amd/include/hip/amd_detail/hip_ldg.h,sha256=KAEZb9H4z4DDrkaloMOeWzahiDfI2V6c68vWT3jb5fU,3652 +triton/backends/amd/include/hip/amd_detail/hip_prof_str.h,sha256=s1T2IrCwYzZQOuCs5ppuegFQbjXSF2JA1eUSCmZg9AA,621355 +triton/backends/amd/include/hip/amd_detail/hip_runtime_prof.h,sha256=6GVfh1la0wtBVwdKX5y0C32dPD9shJp1o8wZdHsjZHA,2715 +triton/backends/amd/include/hip/amd_detail/host_defines.h,sha256=h_ZpFE4Clm2iyRyJevDb57Y-gC-6RVPjhnZ5rzPxiUo,7038 +triton/backends/amd/include/hip/amd_detail/hsa_helpers.hpp,sha256=Os-sJQOFI_0Abh8Ql05s0Rtfruk4NsSMfg7BtugxMgg,3232 +triton/backends/amd/include/hip/amd_detail/macro_based_grid_launch.hpp,sha256=6ocsArNa9_R6D6XCuNy8Zq23KG-j2uYsjqNCtnMrJws,67925 +triton/backends/amd/include/hip/amd_detail/math_fwd.h,sha256=nup5YhceJnngoLJCESI8qX08dNpbZci0i78WKu-wfdI,17000 +triton/backends/amd/include/hip/amd_detail/ockl_image.h,sha256=LzRPGMb515_iIAIIcbb2uQB-bTvT4xOjY51VdARD7lc,10538 +triton/backends/amd/include/hip/amd_detail/program_state.hpp,sha256=8QE9OmB8OKTy7rBr3EYEizJI2s-_1tgXpgU7zCA2Ky0,3154 +triton/backends/amd/include/hip/amd_detail/texture_fetch_functions.h,sha256=Ex1lF2gBWJxtC3yP9pXRSFywMp3gbEmyl0Sw8iL91yM,17787 +triton/backends/amd/include/hip/amd_detail/texture_indirect_functions.h,sha256=KkW5o5gMpoVMTRwzfXHA7-kZ9ynI8OaIw6jJ1EB1s98,18447 +triton/backends/amd/include/hip/channel_descriptor.h,sha256=gTYe7SzIg-m3ThOQY2vr5Rh6-uWvUP_d37v8F4T2Q14,1773 
+triton/backends/amd/include/hip/device_functions.h,sha256=vkybrdk6wyZP-T1I5PRjtfcMqGYXDeBpB5jhYj358GU,1589 +triton/backends/amd/include/hip/driver_types.h,sha256=m1HI80HC80qkTeco2Jd07woL_jTy48lz9JiDCV_8zsg,18985 +triton/backends/amd/include/hip/hip_bf16.h,sha256=lLw6K5ltb6AqSuINYTq8flxxsDkBP8Y2zbqmUjBcG9c,1571 +triton/backends/amd/include/hip/hip_bfloat16.h,sha256=Nqoy9VjfjglVx2_NJcp8hyT1sJUukXRWj8XMlidv1yA,1755 +triton/backends/amd/include/hip/hip_common.h,sha256=q5aPhG3DHW0iUJ7ayS5lfM_ZnZQNbMmLmfdHlOwbPdA,3450 +triton/backends/amd/include/hip/hip_complex.h,sha256=TmdzQP5oVPfhBVARJYcR5eyv9HInmKMFuFoQ_1ECk_I,1594 +triton/backends/amd/include/hip/hip_cooperative_groups.h,sha256=gMLvaYQ3b-f1vcoMtEwtkN0hO5__zNfP5p5oBKmv_SE,1878 +triton/backends/amd/include/hip/hip_deprecated.h,sha256=gFLuCuKn7R_xCfum_i_Q-vi3Lg8NWHKphKZKze8DwEo,6340 +triton/backends/amd/include/hip/hip_ext.h,sha256=jK1Qc-SXgUyRTj8bBa9ZP__95Qgd2-W1mwnJo6Qpnoo,8560 +triton/backends/amd/include/hip/hip_fp16.h,sha256=vKJh-zgDWUW7NyXxtv2ho6aVLXX8BIPfzCigEQ5d6I4,1523 +triton/backends/amd/include/hip/hip_gl_interop.h,sha256=-GwkSFMBneM8akFE7pqlhi0k-Ft2uz5674wGoiaU43Q,1438 +triton/backends/amd/include/hip/hip_hcc.h,sha256=RYrArDlnTEP89xKbzIpW17_bsBY5moCitq00PL-4oWI,1307 +triton/backends/amd/include/hip/hip_math_constants.h,sha256=8bSfve5E7cDuvNAUkFUeQwSLg3iJJHuqhuD4FmHNxEM,1588 +triton/backends/amd/include/hip/hip_profile.h,sha256=sjsNuduu5Jd6s7sJndZvZLlE0RZ0wN1rTVwv5nR7If0,1304 +triton/backends/amd/include/hip/hip_runtime.h,sha256=uy90l8Nep6xNUzeGcHMoDv84BT3hMpieTV-5ijkpL5A,3058 +triton/backends/amd/include/hip/hip_runtime_api.h,sha256=fzb_xktisCVcp2pWG-ZKhIG-YVQzDjGyPt4wvA4iayM,386498 +triton/backends/amd/include/hip/hip_texture_types.h,sha256=AhkvjG4cDjf_ZFLg5SsSTfBnXG614PBK1XVPa7irZbk,1237 +triton/backends/amd/include/hip/hip_vector_types.h,sha256=6FcBMBkP3ZN1Enalpa9hV0VopxdBJvbUCuaxISgzbTY,1630 +triton/backends/amd/include/hip/hip_version.h,sha256=J3vgzfZH0UkK8RYvyHVj1PbUNSZH1JPtlcmXxLBgwVk,407 
+triton/backends/amd/include/hip/hiprtc.h,sha256=npK6f2ZkYIe5blJIGuofuTG0PrSMS2mkFBUqrdOp0A0,15631 +triton/backends/amd/include/hip/library_types.h,sha256=tPOJTQedPH5qC9meawLgKpnbFrQC2WKlfo6s0rhKoZc,2370 +triton/backends/amd/include/hip/math_functions.h,sha256=frzdJ4veBG8n9ALO4EmRrdOiDguR6FP6ygLnvOnVVSM,1815 +triton/backends/amd/include/hip/surface_types.h,sha256=uQHjITphDM7k4pnuEoDEupMUxBobzvhJpSy0unpegh4,1959 +triton/backends/amd/include/hip/texture_types.h,sha256=CtmdykZfDikhnrVfdJk3w2VK5X3Af_6rEKzU-VgLu24,6687 +triton/backends/amd/include/hsa/Brig.h,sha256=5H-btCHq40qgjjpwVAoRWf3E0ccf-J6UCPEcKx_hGKw,32705 +triton/backends/amd/include/hsa/amd_hsa_common.h,sha256=q_zN0eq-dwR7FnQ84PcpV3yZyvjHsouIAjJgKltGoX8,3912 +triton/backends/amd/include/hsa/amd_hsa_elf.h,sha256=_9Zp3EWioseu3ljShNbwNe84AmRWNfjDxRZuj0jJUSY,16305 +triton/backends/amd/include/hsa/amd_hsa_kernel_code.h,sha256=C55F8a480QsW16-iwN9TIT3cKnGh6GoeoEaEv3aVh4g,12659 +triton/backends/amd/include/hsa/amd_hsa_queue.h,sha256=ZJ-k5wY30heLmQnGB0VUz36XCiVHRmspg5FRNMGIk_U,4766 +triton/backends/amd/include/hsa/amd_hsa_signal.h,sha256=FDegZnWQC04GtnqHjXOBsB-AoVSaqdhNY6Mwbua5FGA,2947 +triton/backends/amd/include/hsa/hsa.h,sha256=Jft1K5uFAcasOD9IYW6wD5GsGQcPQTrmbpjie-0Wh00,190916 +triton/backends/amd/include/hsa/hsa_amd_tool.h,sha256=pyZSyIVl-UA5AOhte78jvn4V3hCd0dxJAIv7KeADsPs,2843 +triton/backends/amd/include/hsa/hsa_api_trace.h,sha256=2iuwHcpyW9wvr-WPKCgatQzYBaA8rTa3w1BRMXBGcSI,28982 +triton/backends/amd/include/hsa/hsa_ext_amd.h,sha256=Riw3Ii-AYts1w_yjVD96ZXuY6-BBpnlx_bnnltThK1s,116016 +triton/backends/amd/include/hsa/hsa_ext_finalize.h,sha256=sv0AZbDM-B1wIdQ3cHTMlpUtNacQN2PkOgX90IZol_o,20227 +triton/backends/amd/include/hsa/hsa_ext_image.h,sha256=t5YJm_aw9EePCeFL1hoIfQ8ubIjBte-ptfReq6Ts-8Y,54232 +triton/backends/amd/include/hsa/hsa_ven_amd_aqlprofile.h,sha256=9uev2nT29MCdu7-HMkg9iItHop6QMOBMQL5DAFnftSg,19777 
+triton/backends/amd/include/hsa/hsa_ven_amd_loader.h,sha256=c6cxPAzAox7u6IbFzEkQZfCuRl-Kr39WhY2_w23X1R4,26146 +triton/backends/amd/include/roctracer/ext/prof_protocol.h,sha256=6FAcvVD-dNM7uulFs2B-aTxw5xOAWGy6evdD4yUaebA,3849 +triton/backends/amd/include/roctracer/hip_ostream_ops.h,sha256=WNXFZxawBXHmFGMDFIOZqXkCw6VzyDexwGPkGJre4w0,184840 +triton/backends/amd/include/roctracer/hsa_ostream_ops.h,sha256=AYwF-IT9Dhl2FX-GuvCJZX6fSmHK0xkKLORx9QxuSK8,57857 +triton/backends/amd/include/roctracer/hsa_prof_str.h,sha256=ctT-KKsIGayp7RUGUsFNR-dE65VydyXla_Qgvf-efTU,122884 +triton/backends/amd/include/roctracer/roctracer.h,sha256=B8sHz2DMNprP7EqNWIGwVLY1KQMpxmhfVy4UoR8dzzY,23849 +triton/backends/amd/include/roctracer/roctracer_ext.h,sha256=vLaZ8peAxSy0cwrdEalKnUApkKspfa04iw1Mr_Zcio0,2940 +triton/backends/amd/include/roctracer/roctracer_hcc.h,sha256=NlF3R8JQ9oX9lGpm0b2n-EWJ0r3y9sP9wbwnoucaCuY,1303 +triton/backends/amd/include/roctracer/roctracer_hip.h,sha256=RCzYuNw1vLR7xK4rb06TtM9TU546UYKHJ83IMHmZEm8,1432 +triton/backends/amd/include/roctracer/roctracer_hsa.h,sha256=M8APM64XNAWSslxQisM-pcmKoUQaUdTMaKvSACyt0Ag,4108 +triton/backends/amd/include/roctracer/roctracer_plugin.h,sha256=8GGE1zDbdPCVJtbmwOCYq7X0mwFjfWRtzDYKLD4cKys,4786 +triton/backends/amd/include/roctracer/roctracer_roctx.h,sha256=gBjBk5vb0l3PbBSQ7V9iFtaM_RzkIDJEW1A_PXBihBM,2014 +triton/backends/amd/include/roctracer/roctx.h,sha256=RhJXUXRhSJ5LRE_1gm7E6-bjEMrfcFBLDLuf3UxAIh8,6717 +triton/backends/amd/lib/ockl.bc,sha256=wQKCzkKukIHbu0lyjKUYlhndc7S27xto6L54J0Bn-C0,246124 +triton/backends/amd/lib/ocml.bc,sha256=UPNTXW0gCXUNB-c6orSYwb-mz9_mjUc7zny_vfFza44,205964 +triton/backends/compiler.py,sha256=ILAX6cTYWKsF54P3ffULhsbW7uXXnz9LFYVitKVPhEM,2720 +triton/backends/driver.py,sha256=9EM4ox4FNCkLCGUwUIBMP6u95AOm0wBK4E8MKElfCAI,977 +triton/backends/nvidia/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +triton/backends/nvidia/__pycache__/__init__.cpython-310.pyc,, 
+triton/backends/nvidia/__pycache__/compiler.cpython-310.pyc,, +triton/backends/nvidia/__pycache__/driver.cpython-310.pyc,, +triton/backends/nvidia/bin/cuobjdump,sha256=FLKFErTLe_YgWmaukj-B8lkDrW6il4BbWWX2S0X_b1s,663040 +triton/backends/nvidia/bin/nvdisasm,sha256=rwo7W-VxMOzwUKMQdn01SkxzCzCjvzuIwQDcPJvL6-o,50683112 +triton/backends/nvidia/bin/ptxas,sha256=lN2lShZzlA1W0wcsZO96rLEloeZDlFhuEPd6el_w_4c,30314080 +triton/backends/nvidia/compiler.py,sha256=qXmJa5wKz9Qd2VO5C9xLEJaPfAtFoobq4_gpqt2yu1U,13246 +triton/backends/nvidia/driver.c,sha256=Qc4fWOCoqphl5muQ1YszB7tkxaAGgw5JVDRE-vViAvw,17309 +triton/backends/nvidia/driver.py,sha256=2ncSmPe8RdJKYfNe9RdCMEQh3xq0nVC_H8UQ3VpG6ks,13289 +triton/backends/nvidia/include/Openacc/cupti_openacc.h,sha256=Z0OM5e_hbd3cxdXyn3SCHqBBQawLg4QORnlm57Cr2-M,3513 +triton/backends/nvidia/include/Openmp/cupti_openmp.h,sha256=E1WNmeb_7HaUSmBegtUNe4IV1i7pXeNxgzIlyKn1zrM,3491 +triton/backends/nvidia/include/Openmp/omp-tools.h,sha256=AmuC_xPC7VPu3B-W4PmXuCNufFawhY8PjNXePaQFAOg,37403 +triton/backends/nvidia/include/builtin_types.h,sha256=JxT9Vf2q2snxTBOL9ACzNmYzTWACO2VOVUu1KdFt7_g,3150 +triton/backends/nvidia/include/channel_descriptor.h,sha256=no_vNky02LeMLI0CF8GDVGHaPm_uRUGcVUMYdt_Xn4U,21482 +triton/backends/nvidia/include/common_functions.h,sha256=22LTZRVcPZzEH6MJda7nNMCvMgIjSTe0OKR7sEQj6kc,3410 +triton/backends/nvidia/include/cooperative_groups.h,sha256=JUBW-C1x_7WWuNOaoorTKQab0qzrykkG8oAw1mEHZ2s,60332 +triton/backends/nvidia/include/cooperative_groups/details/async.h,sha256=xsEHCZP3nuEY3l2p8SU2d1226XiXumUvDP_Gyh8PdVY,19122 +triton/backends/nvidia/include/cooperative_groups/details/coalesced_reduce.h,sha256=pBQgFY7i64V87XNATg1UEIQHVNYOItQtHjS5B4yn8pc,4257 +triton/backends/nvidia/include/cooperative_groups/details/coalesced_scan.h,sha256=DfZv5d5W0XJv-tZVhgrIdjLjs6aCx_u0oy1lDIpjo1Q,7314 +triton/backends/nvidia/include/cooperative_groups/details/driver_abi.h,sha256=v-ZUb4UgGKJk6NR2WCWHD3x_42y-togI1urFn70Gi-g,3964 
+triton/backends/nvidia/include/cooperative_groups/details/functional.h,sha256=2BV8i8Bidz0kgxuYkJCAbwFxOIZRyzHgG-c_rVKhRzc,8905 +triton/backends/nvidia/include/cooperative_groups/details/helpers.h,sha256=K9jvxnXc5-6Fum1KG4EQKJJrVZ4BhHOSAJbZR4uDL0c,26476 +triton/backends/nvidia/include/cooperative_groups/details/info.h,sha256=Ij_cqIrcXCcwlaQqCL7AHzMD4H89y0tJeQXCbjTGsFo,12578 +triton/backends/nvidia/include/cooperative_groups/details/invoke.h,sha256=Osq3K-tZuXHVCMQJ708PjPo-BwMhjhjApO4b0TYLFJg,8616 +triton/backends/nvidia/include/cooperative_groups/details/memory.h,sha256=WU28eUcYLA1z131VYGulR4eVCSN9xK9KSxbV656YPs0,5484 +triton/backends/nvidia/include/cooperative_groups/details/partitioning.h,sha256=4UXuvUmZvGANy0hd4erdBNllpgnn4K4qFWWlfzAsHO8,7125 +triton/backends/nvidia/include/cooperative_groups/details/reduce.h,sha256=UfMezM5pqRIotJjmuFgOmiMvbu49sYgjraHutmVVr0w,22111 +triton/backends/nvidia/include/cooperative_groups/details/scan.h,sha256=-Ttwb2AfEEY_tsmqJjR2dojkPpoRx387SoqxgvfdBtQ,17166 +triton/backends/nvidia/include/cooperative_groups/details/sync.h,sha256=zoiBicvB7rlXa_r_VSNuvHVwrLIM7EjF_KdmhvPj1LM,10638 +triton/backends/nvidia/include/cooperative_groups/memcpy_async.h,sha256=erOIHuObdfxRhBWfrXE3wsZF4B2GUuqwzQrsPwKPpbg,2960 +triton/backends/nvidia/include/cooperative_groups/reduce.h,sha256=B0hgDkqM-6ueqTTgb3b34A0RH4vGz8mBf5e2jT1dJ1o,2949 +triton/backends/nvidia/include/cooperative_groups/scan.h,sha256=2EU6T5cWNwftm2B7FicV31PojoI61yo5fHXGRYkGk40,2940 +triton/backends/nvidia/include/crt/common_functions.h,sha256=-U44f4yUGmwDPwd7Q_3Cz5if05xHGPSlAzz5zMylLSQ,13559 +triton/backends/nvidia/include/crt/cudacc_ext.h,sha256=KW6n0ImOZKS0VqVmBHWTXtHI816hh88YeEgUg2aYdVU,3224 +triton/backends/nvidia/include/crt/device_double_functions.h,sha256=A1vB3g0qwnNEfcpT1d9RiGDaxqPXXgYr-Vxe2oMHyxY,39938 +triton/backends/nvidia/include/crt/device_double_functions.hpp,sha256=YYIbqYhb5Qmf8c4YfcC_jytg4FRwcXPjv3TFTwhb24E,8568 
+triton/backends/nvidia/include/crt/device_functions.h,sha256=txuWyo2qoqRZTomi3BSjwUbFvtD9Ea0WKamRgMFQzjQ,136370 +triton/backends/nvidia/include/crt/device_functions.hpp,sha256=9BxQiHjRuETOIntxXAlmTPKp8wlXrBKTPcBaSUQmwfQ,38985 +triton/backends/nvidia/include/crt/func_macro.h,sha256=EOpDlaM917bh9cwBiFBPF689DCMBw5hFarxLxFt-i74,1755 +triton/backends/nvidia/include/crt/host_config.h,sha256=ZnNRtvunIV0ctARy5qbTC1fa5-JpSK5eZ5u5SCcu_BM,12169 +triton/backends/nvidia/include/crt/host_defines.h,sha256=agpWQb4K25fhOP_RsrIuz1L_vPeC2AkbmJY12QgpXKc,9950 +triton/backends/nvidia/include/crt/host_runtime.h,sha256=lOpmkxFZVkEp8dcMAGEZRITsh-19o9jy39kdSNLc3Ng,10284 +triton/backends/nvidia/include/crt/math_functions.h,sha256=iYVBIFDocDsPxqaeKHeeTxAsY-zf04-zfkmETyeahuc,396266 +triton/backends/nvidia/include/crt/math_functions.hpp,sha256=u-CGbd0R2FZWdKG-6bdmGSor9KT_wnmISj63lPQKASM,100207 +triton/backends/nvidia/include/crt/mma.h,sha256=BgSSvJ_IR75W-3uLlC2yE6B7rHeWtamaNn6-XzYU73U,62564 +triton/backends/nvidia/include/crt/mma.hpp,sha256=spo0LX71tUCipxK517Bssj0nc-ZHf8oMWzvHoYYB_6I,66599 +triton/backends/nvidia/include/crt/nvfunctional,sha256=FDM0zqWO6bl9jpJKz9U8CMbjt6iTKh18tQalxAvRsag,16900 +triton/backends/nvidia/include/crt/sm_70_rt.h,sha256=Kf830xymA-zmF7LsunFHLSNyhhT5UiJMocgoHBQeNns,6837 +triton/backends/nvidia/include/crt/sm_70_rt.hpp,sha256=3a_rU-Y0MSB4htBDFY4PCQ_jXiWFTe7WT1ZyhMuCJOA,7837 +triton/backends/nvidia/include/crt/sm_80_rt.h,sha256=MdJHWCRzLM__nDDf1go61rDsl9ydOW3oi6SZBfjUyc8,7743 +triton/backends/nvidia/include/crt/sm_80_rt.hpp,sha256=o-rJu-jpehCeyABGgv-8dYRB7oJTCwuNdvSCq0VURdE,6705 +triton/backends/nvidia/include/crt/sm_90_rt.h,sha256=an47m0XFBaJ3pUX9MlE4-nktP1jb3eJUXhQ3ntZtzc8,11445 +triton/backends/nvidia/include/crt/sm_90_rt.hpp,sha256=YuqVygGV6rgtWtx1J9cPpEI3BXKQBII-Ez6oZFP3wrE,9228 +triton/backends/nvidia/include/crt/storage_class.h,sha256=dzcOZ16pLaN8ejqHaXw4iHbBJ6fXWxfaU-sj2QjYzzg,4791 
+triton/backends/nvidia/include/cuComplex.h,sha256=WpcgpaiPhU_o9sTPMcNTEZuyXDIc8x3sz4dUWSztL2g,12186 +triton/backends/nvidia/include/cuda.h,sha256=29OuNnfs8Hb2sqCXHUKy3VudXxzN8050d0oW_C33ysE,1048458 +triton/backends/nvidia/include/cudaEGL.h,sha256=_CwaQ4cEP1vfNyBSSd5qFxznPCYOovF6Cpj-QWSIBq4,39544 +triton/backends/nvidia/include/cudaEGLTypedefs.h,sha256=xF_FAN1Kar9oyHJ3cCU7jztTpxX8WylpiuYyYpGGHek,5645 +triton/backends/nvidia/include/cudaGL.h,sha256=gMT1HPGa-siuji0gAsKYr4X45Lc29HKglC_ttNSGyUM,22501 +triton/backends/nvidia/include/cudaGLTypedefs.h,sha256=dClpQI-LuXgF9rPSBsj7OkIg8g_fXDjT0hLZS8TGpOg,6576 +triton/backends/nvidia/include/cudaProfilerTypedefs.h,sha256=F2aWLIKv_AhNbxNOaZVcRsxIh0kuscnV8UMWWxkBAlY,3297 +triton/backends/nvidia/include/cudaTypedefs.h,sha256=0hWYyV-KM7R5Qjagz9UP1ldhAZDHGIcJmYtYvB_nwNc,110387 +triton/backends/nvidia/include/cudaVDPAU.h,sha256=Np7Nc2Wjaz--hkpbhW6f9aapr-NbcPDAgkot0sJerco,12694 +triton/backends/nvidia/include/cudaVDPAUTypedefs.h,sha256=wz8nyOUdwM9mH9JO3QZW-A9dyxt-IufSX7nggSXpCNs,4144 +triton/backends/nvidia/include/cuda_awbarrier.h,sha256=3ZH-ZlXODhSiwSY9rqSni_EQwi25QMHP6Tm-zOdxBwE,9340 +triton/backends/nvidia/include/cuda_awbarrier_helpers.h,sha256=OCskCts5bCKl_RKBe9M74zKSIsVpePn44S_aJp1tFXE,12489 +triton/backends/nvidia/include/cuda_awbarrier_primitives.h,sha256=n5__E1jYYDhlgH-f3u8MQjtz57UZ7v5VshhMye1eicM,4699 +triton/backends/nvidia/include/cuda_bf16.h,sha256=2BKEN_8pbieiBHShSfIawa-Oy_3jJzQAl74TqoLQ3MQ,185707 +triton/backends/nvidia/include/cuda_bf16.hpp,sha256=ZJlZSkQJ65G0yhMPDAq3m-oMaEJ3ia9FOsbgnzCtPS0,137924 +triton/backends/nvidia/include/cuda_device_runtime_api.h,sha256=bIhfusirXe5-osOTPAILDh6pY8MW1hefyZvTD_IzgqM,46249 +triton/backends/nvidia/include/cuda_egl_interop.h,sha256=PNWYns30MIytJQHSOh7UbZYlaTX5e0bavzK14tde_C8,37109 +triton/backends/nvidia/include/cuda_fp16.h,sha256=1J7SldpmJk8SNDGD3SO0yVrsLoHkpN1VnMtRZr2Gbcs,175974 
+triton/backends/nvidia/include/cuda_fp16.hpp,sha256=JyedVIUALPBiR_Ci3Rxef_sUs9VvDiP4MDc97Yk_Ys8,123259 +triton/backends/nvidia/include/cuda_fp8.h,sha256=Q3OP5o_3rSYbKtVIlcXVr_CncU3SPM-09j605e2Zegw,13833 +triton/backends/nvidia/include/cuda_fp8.hpp,sha256=b-PcyZgei5MmIp6op0QQ40BgNupO_ei648hG_dUS-FQ,64246 +triton/backends/nvidia/include/cuda_gl_interop.h,sha256=VQEswFeOBF6JN6Q0pdlkvc5WT7bD1FnTfKewvANulCc,19150 +triton/backends/nvidia/include/cuda_occupancy.h,sha256=Kr9HyOe-hlRjBAzbINwUYkNgbbIgIjuvKs09UZhMYQo,67179 +triton/backends/nvidia/include/cuda_pipeline.h,sha256=0enXG49wN4JajlQi3ahbp2ei_ufTY_Mznic7zfWmKHM,8130 +triton/backends/nvidia/include/cuda_pipeline_helpers.h,sha256=bo1L7e6vCuM-K3Il8K1z4wJUja5DyXQKdo_hSWUME-E,13852 +triton/backends/nvidia/include/cuda_pipeline_primitives.h,sha256=FnJJtuV6rHr6LgL56XDwilcSbFr6W1Hj6mf1AJaMI20,8675 +triton/backends/nvidia/include/cuda_runtime.h,sha256=a-OXWPsmKSPst7mRCCxHNZV7m-uRLCAY8oGRi-dJzPA,90683 +triton/backends/nvidia/include/cuda_runtime_api.h,sha256=7Ys9yv_2trFEVybtbh-UJKnDKG8fHWvUjSX4cgZGCck,608580 +triton/backends/nvidia/include/cuda_stdint.h,sha256=XbFOk9CtJjKqk7PpYNqbSVsDxAsVM8avA4rWpPi0BjQ,4093 +triton/backends/nvidia/include/cuda_surface_types.h,sha256=Mw5Lo4b8Q-f9mogOvATGyHhu9d2t2K6XOxuqtZrSh3A,3688 +triton/backends/nvidia/include/cuda_texture_types.h,sha256=ITbX-JNnP7Rm-JSgNVdJ9pq6k8FVor8RbnruDsKq6sk,3688 +triton/backends/nvidia/include/cuda_vdpau_interop.h,sha256=bXQanWc2IFXZAKWNGl2xAz9nLvFmQpWyGrsDvfeS9FA,7727 +triton/backends/nvidia/include/cudart_platform.h,sha256=YN6sKhB0b9w5tGX1IYL7ulJVPrWAiX9A44qLv4EtW5Q,2717 +triton/backends/nvidia/include/cupti.h,sha256=JkVyAGTIMYzwm62dfVqas3nMcILhgP_Wdz6fh4_NED0,4697 +triton/backends/nvidia/include/cupti_activity.h,sha256=1aNI_zmQnjAguMBU0UqqMR_heE77FiafQkZl9or_1Ww,210387 +triton/backends/nvidia/include/cupti_activity_deprecated.h,sha256=rYJsoAJxA2BTT50-olN8EYcSzdlXBpRbR1ATLG3rVIM,121526 
+triton/backends/nvidia/include/cupti_callbacks.h,sha256=zrEVRb0hubSfD69QUmHsJiL8oAfvqyuKGcTVRihQrnc,29729 +triton/backends/nvidia/include/cupti_checkpoint.h,sha256=rTz8JoWxqESBXyZWUhZJGm4xeYcx4OJOtJ7Ld13T_b0,5264 +triton/backends/nvidia/include/cupti_common.h,sha256=85m74bxUgXp3tEaPQpezeazmpsNMw41PsjNSYmQdT20,3514 +triton/backends/nvidia/include/cupti_driver_cbid.h,sha256=dHKyQYZbBbdlxixzFkIoNHg5IfGXdgriyjN1Bu1i6g4,74462 +triton/backends/nvidia/include/cupti_events.h,sha256=f7lLGmD2e8FzvMhRgnn0-v7U0vTpUkiQHIpQxgARGb0,51896 +triton/backends/nvidia/include/cupti_metrics.h,sha256=iLAOlDrcbHEsIIUmgq0Tp1ZOY9O3Ot3wj2-bI8iYbSs,32148 +triton/backends/nvidia/include/cupti_nvtx_cbid.h,sha256=_azPtR1g4qivvX7qbvHRUg0RHCWF7iEOJyHMN9qZe9E,5912 +triton/backends/nvidia/include/cupti_pcsampling.h,sha256=ycJHT36DmPIaVzHsB3xxjXkhFyEfMCJOl3LbCsHFgyA,32144 +triton/backends/nvidia/include/cupti_pcsampling_util.h,sha256=lx8CaNXowJe5Zvc06LE-u_Zry_jODs1mM6j9Q5WIX9E,12430 +triton/backends/nvidia/include/cupti_profiler_target.h,sha256=JsceoDuhllWNEzaO0xxT81dJ55NrbF0UtRJJgit0P_E,32131 +triton/backends/nvidia/include/cupti_result.h,sha256=a-C4Y7LAYCiCT1ngOfoDuTi2stEG1YTafwwn6UfL-LU,12603 +triton/backends/nvidia/include/cupti_runtime_cbid.h,sha256=11pXl0MdmTtxUngel-ru4JdqWvF_gEIG14aQExRyfzI,46436 +triton/backends/nvidia/include/cupti_sass_metrics.h,sha256=3RW9snJuFQdOhrEn3wDJOru05q0V_zssWrqD7tvVJKw,19674 +triton/backends/nvidia/include/cupti_target.h,sha256=x4Vz1Upb6m9ixmVpmGaKQldDWYQI3OZ-ocEXGzNK0EE,1263 +triton/backends/nvidia/include/cupti_version.h,sha256=sjd-aUoTGkEWyvA2VUWIpZwXyXAaclqC8gbwNnuK5D0,4425 +triton/backends/nvidia/include/device_atomic_functions.h,sha256=OR2jNSfSKzaFri74zh4Vtz5M0z9UDBU3rKeC1rYaVQs,9500 +triton/backends/nvidia/include/device_atomic_functions.hpp,sha256=0e7MOiNNUnnloXpB_r9WT5YOws5cxgzQQAzRCYvgaFA,10486 +triton/backends/nvidia/include/device_double_functions.h,sha256=KUxId5Z1fx8SWfLRTxPD7RB-zN7zslzb4n7JaJLfL3I,3452 
+triton/backends/nvidia/include/device_functions.h,sha256=bWSrhTYE9NQlss7xMSMEVusvto9j2fgUDXWVH2W_cOA,3410 +triton/backends/nvidia/include/device_launch_parameters.h,sha256=H1_CC-vvAaS26ys4XsTFkMgTxUTciAjdjswjizkisvQ,3846 +triton/backends/nvidia/include/device_types.h,sha256=2LFxoZBJPoA5V0H1EbKTEaXDi3GDJPtzOPdRHDaucIQ,3588 +triton/backends/nvidia/include/driver_functions.h,sha256=cN3IjRAz2Mj2Pj35SyxJIkZNDDusnJqaqzBdMzpQKbA,4625 +triton/backends/nvidia/include/driver_types.h,sha256=4eBQ10Nzgfs2BlxGaGHVMWLvnJfKrEnMml9zfFi0DyA,177782 +triton/backends/nvidia/include/fatbinary_section.h,sha256=NnuUfy358yGJx4enq0pBnetjv17UWa-nOlgYToUitrw,1809 +triton/backends/nvidia/include/generated_cudaGL_meta.h,sha256=dfd2QuaRdEjbStOKvaQLi1Md_qrpRQh8PfyZznJ8bWY,3115 +triton/backends/nvidia/include/generated_cudaVDPAU_meta.h,sha256=fAedsoQxaU3hIAApAWDOKsa9kgcuQw4tdyf8klLm-3k,1453 +triton/backends/nvidia/include/generated_cuda_gl_interop_meta.h,sha256=LXOqvQCej0sCgAT1LUKKYZ466EFxN4hIwf9oIhXOLF0,2250 +triton/backends/nvidia/include/generated_cuda_meta.h,sha256=hawYpDe0xpaDFDnClXI91JjwCRxWb-AS0FS8ydUMgxc,94639 +triton/backends/nvidia/include/generated_cuda_runtime_api_meta.h,sha256=D8CbAN3-jLuF2KGfsBHXEELSgL92KrUAiDvugWE8B8M,69706 +triton/backends/nvidia/include/generated_cuda_vdpau_interop_meta.h,sha256=8OLqWN26aEYpTWUXtbHJvA5GYhVv3ybYVOTW7yK37z8,1367 +triton/backends/nvidia/include/generated_cudart_removed_meta.h,sha256=X3I5WXmhtsJNNlgY7coJ5vg4t11G5FRR6Xo7MboIeck,5172 +triton/backends/nvidia/include/generated_nvtx_meta.h,sha256=YHb_RD8g3s4m8PJn7Z0wnxvUHarl7BOAX5ADr-BL3HI,7513 +triton/backends/nvidia/include/host_config.h,sha256=BscH_GazAZbbotddVzL5RmafbQ-QjRx8f-I1O01IBW8,3380 +triton/backends/nvidia/include/host_defines.h,sha256=bBQwQF5C1N1c2qpLV56g1c-weu9Ysgz-gIf2Kn3uz_A,3386 +triton/backends/nvidia/include/library_types.h,sha256=p6746aCd_A_1VlgKRhLJChzeZ4tN7e4HBH2Hm7hDjbU,4836 +triton/backends/nvidia/include/math_constants.h,sha256=cV6hAyQe8X7f7MBtaKjjIJq3BycOUDp6I5cizJX5HLw,7608 
+triton/backends/nvidia/include/math_functions.h,sha256=5XcC6j-fJKttvhwc4hZNoLHNw808a2ZYIOtZ7ry7yd0,3398 +triton/backends/nvidia/include/mma.h,sha256=IY_VenxuEncwGq92MhrWUb-Xswh0ekAXLy9Rbxhxa2Y,2932 +triton/backends/nvidia/include/nvPTXCompiler.h,sha256=z_v0P6Sj0KfDQBmAKIdgFoPOylhsO4B221w3KDUqbM0,12076 +triton/backends/nvidia/include/nvfunctional,sha256=IkFoCi_Q4OhP9nEuBI-5jWwFlR_PfG05hJH7lSMsfWc,2975 +triton/backends/nvidia/include/nvperf_common.h,sha256=BqPml9AxyN10-ptWT3hQzh2JUWqQX57Q5BjQ3ZuaKNs,17255 +triton/backends/nvidia/include/nvperf_cuda_host.h,sha256=aBnyIr_hexPDGBkP6WSujN1mI_DYP25sEIXWYY1O7VI,8298 +triton/backends/nvidia/include/nvperf_host.h,sha256=afdHG6eraeo4ltlF9ihskqhU7IccxcRCaZDZ6_ikjkg,68506 +triton/backends/nvidia/include/nvperf_target.h,sha256=ZDA-JI459tLBW4iLLCQjYYRAMeHwfqDIgXbVqVLDYZ4,22539 +triton/backends/nvidia/include/sm_20_atomic_functions.h,sha256=x4ycINVq__l9B4SQPD-I48jQbKxxdBmgp8Vf2GO0Qfg,4478 +triton/backends/nvidia/include/sm_20_atomic_functions.hpp,sha256=1l5NLM8DhDbqYZ_E51LoqElQJXObkbwo57d3r-4uEbE,4107 +triton/backends/nvidia/include/sm_20_intrinsics.h,sha256=a4jDSp_DUW0d09g5wgEm_I7bGTAe73HKRinkhBKQBis,51048 +triton/backends/nvidia/include/sm_20_intrinsics.hpp,sha256=BhEBuXSKBsNGJDBJDtYL0cGRI3wX_w_OIgA5D-YxIWk,7694 +triton/backends/nvidia/include/sm_30_intrinsics.h,sha256=b6W8Vxp9vD9OCJI6lZuGyZYXEdQ3Ei8PTAloHNkwCcQ,16978 +triton/backends/nvidia/include/sm_30_intrinsics.hpp,sha256=yX0ebd265tJ-BDhvluP2BhadPuWXpRZPI2eeQFFt5ys,24567 +triton/backends/nvidia/include/sm_32_atomic_functions.h,sha256=HGnZgQHACE2AAb6zabGUURc53IsVZelc2BSJqvs9OgY,5703 +triton/backends/nvidia/include/sm_32_atomic_functions.hpp,sha256=CQTTvOEYp-s5hqAgLvAon11vLYDrDp8cTHdel-XRzBQ,6592 +triton/backends/nvidia/include/sm_32_intrinsics.h,sha256=Xdkogdsjy1vh8u3eGu0i5xTmHxBGAjj6_vVGR-spdOE,33539 +triton/backends/nvidia/include/sm_32_intrinsics.hpp,sha256=Gl8aSLDLcit4W3pKQS19GsDG8RYcwD65HwYB_CeZe8M,70616 
+triton/backends/nvidia/include/sm_35_atomic_functions.h,sha256=a3XoEsKRCEOf0Q_5Y__rMfmC4pScv4VkUggVgVJVn44,2909 +triton/backends/nvidia/include/sm_35_intrinsics.h,sha256=0mS5-LCgvZiTvL7-MG_4YwI-zWGvM-s4xyRuMkunMC8,2664 +triton/backends/nvidia/include/sm_60_atomic_functions.h,sha256=_anfNaJsvQpDEorYeUKIkbizYkwrinBcG_ZCiECtLqI,13178 +triton/backends/nvidia/include/sm_60_atomic_functions.hpp,sha256=cgIKddDn2B3QzYlzeBILAP1IRys74QCCxsH0QqaVGls,22903 +triton/backends/nvidia/include/sm_61_intrinsics.h,sha256=h_MBL1UUDxQX_qOddSImzqyFjcrhhm_63G97pGDyreU,10902 +triton/backends/nvidia/include/sm_61_intrinsics.hpp,sha256=N-nQvcBsPMT2Umy5zR69c9K1q366W-Jqe7NpoLTqTmg,6787 +triton/backends/nvidia/include/surface_functions.h,sha256=b1O82SAvEgWWxA9uZTWQcGimzZUoem2QbAET3wh3fZc,6782 +triton/backends/nvidia/include/surface_indirect_functions.h,sha256=vy9QuFVV-ezZP-x2RT9RLp2qIUgdngACOCmalSfVFPA,10877 +triton/backends/nvidia/include/surface_types.h,sha256=XkFXD1nHbeSMgajR-UJE9uQ7TByzJnjdnUL4-yGiufk,4530 +triton/backends/nvidia/include/texture_fetch_functions.h,sha256=KLCmUxf5aY5_UalX8tSFB6e4TrjA8hyUPxLOkMFltAo,12468 +triton/backends/nvidia/include/texture_indirect_functions.h,sha256=lH_y3Ni-hq4RZ0_PMFbBM0th5-OmTn3TtqtpkHHhA8w,21163 +triton/backends/nvidia/include/texture_types.h,sha256=73ntVyg8r8fzKy5VIk6yuvC45GDeWepaLIqIk-M3Ri8,6360 +triton/backends/nvidia/include/vector_functions.h,sha256=WypGkL-IDbGOlay7g_G0p3HO7OLGRE0Do__JtiFoWxY,8003 +triton/backends/nvidia/include/vector_functions.hpp,sha256=afXhNSd3LFTZo96EPtesTLfvxd4nTmLVzgkj967rTRg,10060 +triton/backends/nvidia/include/vector_types.h,sha256=6CJ4yt3KD7zQVfm1NhrgqNYYEDEIZWwaivlFx12nhNg,13396 +triton/backends/nvidia/lib/libdevice.10.bc,sha256=XC-uN8huaMOjhgWpX1EtfRLV89uYYxC-R_VzBKpype4,473728 +triton/compiler/__init__.py,sha256=PD2VOiqnb3qUrti77C_E83lX8Rch-jLVwN98I7XiMRA,256 +triton/compiler/__pycache__/__init__.cpython-310.pyc,, +triton/compiler/__pycache__/code_generator.cpython-310.pyc,, 
+triton/compiler/__pycache__/compiler.cpython-310.pyc,, +triton/compiler/__pycache__/errors.cpython-310.pyc,, +triton/compiler/__pycache__/make_launcher.cpython-310.pyc,, +triton/compiler/code_generator.py,sha256=g11PwzUA4azK_FxeAPT3PYdDLBFIYMBLaISb9vlCuOU,57757 +triton/compiler/compiler.py,sha256=3HZ8j63PI-qC154lfDfUQHfq5_aBz5Z1Zf1D6cAXk3s,16605 +triton/compiler/errors.py,sha256=I9Y15pDWcL9heY4SWWdLeMDtW6Iiq2pFXzKfJ6dY_C0,1732 +triton/compiler/make_launcher.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +triton/errors.py,sha256=8WfnuRKLG578mgY6cBA3ECruVMf9ULEKFNgRcJ6IhWM,89 +triton/language/__init__.py,sha256=j2x4eORgWxCDtbAMIj67qPpl4DzeJiffxhqkCLvIBNU,4716 +triton/language/__pycache__/__init__.cpython-310.pyc,, +triton/language/__pycache__/core.cpython-310.pyc,, +triton/language/__pycache__/math.cpython-310.pyc,, +triton/language/__pycache__/random.cpython-310.pyc,, +triton/language/__pycache__/semantic.cpython-310.pyc,, +triton/language/__pycache__/standard.cpython-310.pyc,, +triton/language/core.py,sha256=cTMPOU5YSw3ylZXUn2rIrAxVA9uRRwkNxYrYiM66x74,89244 +triton/language/extra/__init__.py,sha256=8krf1SOD94ZnkeuqXUWl0syCblbuL9KF9DjoXSAf5kM,64 +triton/language/extra/__pycache__/__init__.cpython-310.pyc,, +triton/language/extra/__pycache__/libdevice.cpython-310.pyc,, +triton/language/extra/cuda/__init__.py,sha256=L-L0lztxn9O06wGzVyhmQRFQ_EI-6gyB65iEzO7oEB4,290 +triton/language/extra/cuda/__pycache__/__init__.cpython-310.pyc,, +triton/language/extra/cuda/__pycache__/libdevice.cpython-310.pyc,, +triton/language/extra/cuda/__pycache__/utils.cpython-310.pyc,, +triton/language/extra/cuda/libdevice.py,sha256=WiYuVmetUT8F74Q8auQtDnkSQU_-rkyF0dOgujDhuJA,56033 +triton/language/extra/cuda/utils.py,sha256=e1BslV7lZGhi2uVIlo5lI9dcN61HUMIU2asPaRjsyIo,4379 +triton/language/extra/hip/__init__.py,sha256=ieSER4LeX9_0horChGUUVwpuKAprkuka8uGAkEBDyDM,49 +triton/language/extra/hip/__pycache__/__init__.cpython-310.pyc,, 
+triton/language/extra/hip/__pycache__/libdevice.cpython-310.pyc,, +triton/language/extra/hip/libdevice.py,sha256=NaAqjBuLcc2e9XOxZi4eYM1wc8El3iCrsYCfWgWp-28,16551 +triton/language/extra/libdevice.py,sha256=wNGqO71EcHrrnN9ArQQ6znwSpywXu-OaUwVMBoVPjKI,14729 +triton/language/math.py,sha256=o0vg065LOsmu3hlc_aQvigoJeyvJZC-1lnPOaztgfxA,7332 +triton/language/random.py,sha256=NhMKN68bGaDexWmqCNMLAMdMjwK2tjKZnYbeoWkUZ5I,6736 +triton/language/semantic.py,sha256=NtQvBCmq4CfLmFtTuavGFvrk2tng6gOVPh6ftG0PuaA,73108 +triton/language/standard.py,sha256=5dLLckwI0O9BMPO6hY0izyH4NL_ZcKU4iqatKmurkfk,13132 +triton/ops/__init__.py,sha256=Yo_IfcP54HxucFaQNc4aOtfOGryUcQZUDA4aONg6sHk,324 +triton/ops/__pycache__/__init__.cpython-310.pyc,, +triton/ops/__pycache__/cross_entropy.cpython-310.pyc,, +triton/ops/__pycache__/flash_attention.cpython-310.pyc,, +triton/ops/__pycache__/matmul.cpython-310.pyc,, +triton/ops/__pycache__/matmul_perf_model.cpython-310.pyc,, +triton/ops/blocksparse/__init__.py,sha256=6YEVQNzipgQCpoO_7B8H7ckaSW2Idt1244s7IyLWAwc,100 +triton/ops/blocksparse/__pycache__/__init__.cpython-310.pyc,, +triton/ops/blocksparse/__pycache__/matmul.cpython-310.pyc,, +triton/ops/blocksparse/__pycache__/softmax.cpython-310.pyc,, +triton/ops/blocksparse/matmul.py,sha256=S29Wv0X47AUoCMfSw7A7-Lt6lUyGPzy63Q8pcD41O1w,15920 +triton/ops/blocksparse/softmax.py,sha256=2jfmu1Bn9XsM4PyBsSRaSi3-XK0bJABxwQ-XsTwo7fg,8243 +triton/ops/cross_entropy.py,sha256=Jr-iQ6oZQir8gh4WRmlPoh_CY4fM8x9c9dDsuavyFyQ,3451 +triton/ops/flash_attention.py,sha256=1W8-D9OFJWAYmNhsFipKufHb1ZNEOIuz4ZMq_3HEq3s,18030 +triton/ops/matmul.py,sha256=kKVeZG7t31g_iS9Sk2Y-XJc3GzP5DTwmcv11OUAE4-4,9257 +triton/ops/matmul_perf_model.py,sha256=E8LuqIrb-u_NCqSDD0r9hHNPkPKCTMTKJNAVOuZomaU,6697 +triton/profiler/__init__.py,sha256=8MMGWMNsHxvgFva8l6o9lzUcAdGjpxiQouuTwJ4qkdQ,184 +triton/profiler/__pycache__/__init__.cpython-310.pyc,, +triton/profiler/__pycache__/flags.cpython-310.pyc,, +triton/profiler/__pycache__/hook.cpython-310.pyc,, 
+triton/profiler/__pycache__/profile.cpython-310.pyc,, +triton/profiler/__pycache__/proton.cpython-310.pyc,, +triton/profiler/__pycache__/scope.cpython-310.pyc,, +triton/profiler/__pycache__/viewer.cpython-310.pyc,, +triton/profiler/flags.py,sha256=BFBKQnozRN9Jp18_S5MuIeu5CJMW7_I38pM55qOg2oQ,604 +triton/profiler/hook.py,sha256=1FqwAGrdmmzWIyy3qqPH3-3OHtQtdN64FRwEnizXCx8,1100 +triton/profiler/profile.py,sha256=RXz6bej6-Z33i1CLH9aGSgegQb1LMKWwbnwdIyLSlt4,5832 +triton/profiler/proton.py,sha256=f1cokCi2wYzCOnl8ztPb-_xc-uKSBMW3h3uJajvYuX8,2624 +triton/profiler/scope.py,sha256=gwsjiwrXH16_SMHEooGM3KLLe7XIowjFvd__L5t4WSg,3125 +triton/profiler/viewer.py,sha256=0cHhg6gOe2t4_JA9GXp8wBthFySO6Hw8kYW2PWwjwMM,9635 +triton/runtime/__init__.py,sha256=mKL5cqIBDUw2WO80NRCh4s1G8KYaqgM59TTAbTkPPjQ,621 +triton/runtime/__pycache__/__init__.cpython-310.pyc,, +triton/runtime/__pycache__/autotuner.cpython-310.pyc,, +triton/runtime/__pycache__/build.cpython-310.pyc,, +triton/runtime/__pycache__/cache.cpython-310.pyc,, +triton/runtime/__pycache__/driver.cpython-310.pyc,, +triton/runtime/__pycache__/errors.cpython-310.pyc,, +triton/runtime/__pycache__/interpreter.cpython-310.pyc,, +triton/runtime/__pycache__/jit.cpython-310.pyc,, +triton/runtime/autotuner.py,sha256=ndJ_wuOVaXCBVsjmIzFX1IRPHTAH6FKUm-1CHpOhNQs,14974 +triton/runtime/build.py,sha256=7PqCGjCdwjakAJq6FRxnJ8CQtUmWBNeYqgqYdfks1G0,2594 +triton/runtime/cache.py,sha256=Fmr6AgDubT7XPe9Lan6WE_czbSS7ZFZ__8qkdvaWkSE,9759 +triton/runtime/driver.py,sha256=VZ-883Xri71R72lHB6usIpLo3gGLbZJkAlLP3ewWSpc,1509 +triton/runtime/errors.py,sha256=oj73dn34qJbLhOjakakAuZPSv-laZyIYylJiJwREA8Y,787 +triton/runtime/interpreter.py,sha256=NWnlemxOKQGQPnYUbIhiQJQkRKgGo2yuUnc2dnAnTdc,49061 +triton/runtime/jit.py,sha256=dUK_klRkYzHtkM4W_hv7LK-H1310jYQ5aqtEyAJ9UIs,34722 +triton/testing.py,sha256=1GS3ZmMuNNK2Opg990qJUdlH2AfIKZvV-zNZRYJa4Ys,18347 +triton/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+triton/tools/__pycache__/__init__.cpython-310.pyc,, +triton/tools/__pycache__/build_extern.cpython-310.pyc,, +triton/tools/__pycache__/compile.cpython-310.pyc,, +triton/tools/__pycache__/disasm.cpython-310.pyc,, +triton/tools/__pycache__/link.cpython-310.pyc,, +triton/tools/build_extern.py,sha256=jCr-2hu3nLGBIJhCGUQ1jAyzLttughjkiPGEwRFjLR0,13673 +triton/tools/compile.c,sha256=rjuAQ8b-2DTtbj29SgK1NxJI5BSU2P9ccp9wa5p8Iyc,2090 +triton/tools/compile.h,sha256=n9QKIFZTL4RSsiXtAxBP9XGSnxjyaevQQ9bBpwDsvAg,332 +triton/tools/compile.py,sha256=CR1_-TBz77rMeN9lN2pc6EtErQwfQBdBMQvpWKr-ezs,6468 +triton/tools/disasm.py,sha256=U58GRL7v14hu4-B_kWkciHaY9jVIkTKg7DtioH4LTHo,5080 +triton/tools/link.py,sha256=u7qtfZRLriZkAMEGNvj8YF-k1cthmLL7BwHYqBgT63E,11871 diff --git a/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/REQUESTED b/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/entry_points.txt b/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..fec7e033ca5aee50e0b944b9c14f2987c668d505 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +proton = triton.profiler.proton:main +proton-viewer = triton.profiler.viewer:main diff --git a/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/top_level.txt b/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..1016c8445b5539e982b3a35cd969e4b758233df1 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/triton-3.0.0.dist-info/top_level.txt @@ -0,0 +1,15 @@ +triton +triton/_C +triton/backends +triton/backends/amd +triton/backends/nvidia +triton/compiler +triton/language 
+triton/language/extra +triton/language/extra/cuda +triton/language/extra/hip +triton/ops +triton/ops/blocksparse +triton/profiler +triton/runtime +triton/tools diff --git a/parrot/lib/python3.10/site-packages/yaml/__pycache__/__init__.cpython-310.pyc b/parrot/lib/python3.10/site-packages/yaml/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8b2656a5072a1599c4702837ce8b833d95d3271a Binary files /dev/null and b/parrot/lib/python3.10/site-packages/yaml/__pycache__/__init__.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/yaml/__pycache__/composer.cpython-310.pyc b/parrot/lib/python3.10/site-packages/yaml/__pycache__/composer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c68aa456358c5fd77b99636f9aebff6f2a826bc9 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/yaml/__pycache__/composer.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/yaml/__pycache__/constructor.cpython-310.pyc b/parrot/lib/python3.10/site-packages/yaml/__pycache__/constructor.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..894342127ea4c1c1041667aef3c598c8286bb31b Binary files /dev/null and b/parrot/lib/python3.10/site-packages/yaml/__pycache__/constructor.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/yaml/__pycache__/error.cpython-310.pyc b/parrot/lib/python3.10/site-packages/yaml/__pycache__/error.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10f266e86664c2592584b94fce53ed61b3b01a51 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/yaml/__pycache__/error.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/yaml/__pycache__/parser.cpython-310.pyc b/parrot/lib/python3.10/site-packages/yaml/__pycache__/parser.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..b67f395217ed2feb4aa12c62745e037a0fe92fd7 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/yaml/__pycache__/parser.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/yaml/__pycache__/reader.cpython-310.pyc b/parrot/lib/python3.10/site-packages/yaml/__pycache__/reader.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3ce186f001132f99383fc4eda5cfc94ce9cab2e2 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/yaml/__pycache__/reader.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/yaml/__pycache__/representer.cpython-310.pyc b/parrot/lib/python3.10/site-packages/yaml/__pycache__/representer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..66a3dea94f32bc33bc061ae4da6edb7d6e0d5b1e Binary files /dev/null and b/parrot/lib/python3.10/site-packages/yaml/__pycache__/representer.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/yaml/__pycache__/scanner.cpython-310.pyc b/parrot/lib/python3.10/site-packages/yaml/__pycache__/scanner.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cda0e92421677470ecf317a987c7a157905fda1d Binary files /dev/null and b/parrot/lib/python3.10/site-packages/yaml/__pycache__/scanner.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/yaml/__pycache__/serializer.cpython-310.pyc b/parrot/lib/python3.10/site-packages/yaml/__pycache__/serializer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..50aae80f543bb5b45e94eb9849fe3afa467db1a9 Binary files /dev/null and b/parrot/lib/python3.10/site-packages/yaml/__pycache__/serializer.cpython-310.pyc differ diff --git a/parrot/lib/python3.10/site-packages/yaml/nodes.py b/parrot/lib/python3.10/site-packages/yaml/nodes.py new file mode 100644 index 
0000000000000000000000000000000000000000..c4f070c41e1fb1bc01af27d69329e92dded38908 --- /dev/null +++ b/parrot/lib/python3.10/site-packages/yaml/nodes.py @@ -0,0 +1,49 @@ + +class Node(object): + def __init__(self, tag, value, start_mark, end_mark): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + def __repr__(self): + value = self.value + #if isinstance(value, list): + # if len(value) == 0: + # value = '' + # elif len(value) == 1: + # value = '<1 item>' + # else: + # value = '<%d items>' % len(value) + #else: + # if len(value) > 75: + # value = repr(value[:70]+u' ... ') + # else: + # value = repr(value) + value = repr(value) + return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) + +class ScalarNode(Node): + id = 'scalar' + def __init__(self, tag, value, + start_mark=None, end_mark=None, style=None): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.style = style + +class CollectionNode(Node): + def __init__(self, tag, value, + start_mark=None, end_mark=None, flow_style=None): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.flow_style = flow_style + +class SequenceNode(CollectionNode): + id = 'sequence' + +class MappingNode(CollectionNode): + id = 'mapping' +