diff --git a/.gitattributes b/.gitattributes index 2f9df9d884e02e6d4607e988afec0916513a7057..8b1a4f54b32525822e27d004c02f26b81dc3d184 100644 --- a/.gitattributes +++ b/.gitattributes @@ -676,3 +676,4 @@ evalkit_tf437/lib/python3.10/site-packages/scipy/optimize/_highs/_highs_wrapper. deepseekvl2/lib/python3.10/site-packages/transformers/models/seamless_m4t_v2/__pycache__/modeling_seamless_m4t_v2.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text deepseekvl2/lib/python3.10/site-packages/transformers/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text deepseekvl2/lib/python3.10/site-packages/transformers/__pycache__/modeling_outputs.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text +deepseek/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/__init__.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab8e8863038c7895fdfc641a5be9a14de81aa5c7 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/__init__.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/__version__.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/__version__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..93c1ee029d685d5441eef5da9d1a0567b0a5c00d Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/__version__.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_api.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_api.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ea35efc5a3c5e2e5080f81e2b4db0302e07737c5 Binary files /dev/null and 
b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_api.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_auth.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_auth.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0065848e5ce3ea5279e078ff4178cccc054aad17 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_auth.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_client.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_client.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8fd764536c6bbcbb40ac28b1d2c3c04ea4445386 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_client.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_config.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be07747bf0858a4796a0e46557caa5328b16c0d9 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_config.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_content.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_content.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..03eec72c7fb7b4c11f2b1a953e3f18f5e49bd47f Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_content.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_decoders.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_decoders.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ec6ba49fe380b7d9e19af4cfdc22b7484a3a0807 Binary files /dev/null 
and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_decoders.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_exceptions.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f5e72dc326fe71573cdf4135e394ac73b02eb797 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_exceptions.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_main.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_main.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..79e1c9756d67f2ee9b96f6cd95e8ce0c1575cc27 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_main.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..658255df729549e0b7567dd84192ebf439c3f419 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_multipart.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_multipart.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..995e4d490ae842db1990b7624752e5fc631ad6f8 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_multipart.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..995e3ee5370c3a2eab0de9463cddff484b3e1109 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_types.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_types.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b4e9b97f0d2feb976631305ec44dc828996a1381 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_types.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_urlparse.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_urlparse.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f0a8037225efc8ebedc86f6a078734d43c4ece46 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_urlparse.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_urls.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_urls.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a365c75b39a24219f65905fc84d01f9057992f60 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_urls.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_utils.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..393956221f88313caf037b6d9c341370250c63ae Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/__pycache__/_utils.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/_transports/__pycache__/asgi.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/_transports/__pycache__/asgi.cpython-310.pyc new 
file mode 100644 index 0000000000000000000000000000000000000000..b77e6b22e87c19615f0c89b3d3204ca4e63da052 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/_transports/__pycache__/asgi.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/_transports/__pycache__/mock.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/httpx/_transports/__pycache__/mock.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7a6ffab863efe3730d51d6a3cc2d06b7bc25c17a Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/httpx/_transports/__pycache__/mock.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/httpx/_transports/base.py b/deepseek/lib/python3.10/site-packages/httpx/_transports/base.py new file mode 100644 index 0000000000000000000000000000000000000000..66fd99d702480b555c06694fe14715ea6df3dfc3 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/httpx/_transports/base.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import typing +from types import TracebackType + +from .._models import Request, Response + +T = typing.TypeVar("T", bound="BaseTransport") +A = typing.TypeVar("A", bound="AsyncBaseTransport") + +__all__ = ["AsyncBaseTransport", "BaseTransport"] + + +class BaseTransport: + def __enter__(self: T) -> T: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self.close() + + def handle_request(self, request: Request) -> Response: + """ + Send a single HTTP request and return a response. + + Developers shouldn't typically ever need to call into this API directly, + since the Client class provides all the higher level user-facing API + niceties. 
+ + In order to properly release any network resources, the response + stream should *either* be consumed immediately, with a call to + `response.stream.read()`, or else the `handle_request` call should + be followed with a try/finally block to ensuring the stream is + always closed. + + Example usage: + + with httpx.HTTPTransport() as transport: + req = httpx.Request( + method=b"GET", + url=(b"https", b"www.example.com", 443, b"/"), + headers=[(b"Host", b"www.example.com")], + ) + resp = transport.handle_request(req) + body = resp.stream.read() + print(resp.status_code, resp.headers, body) + + + Takes a `Request` instance as the only argument. + + Returns a `Response` instance. + """ + raise NotImplementedError( + "The 'handle_request' method must be implemented." + ) # pragma: no cover + + def close(self) -> None: + pass + + +class AsyncBaseTransport: + async def __aenter__(self: A) -> A: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + await self.aclose() + + async def handle_async_request( + self, + request: Request, + ) -> Response: + raise NotImplementedError( + "The 'handle_async_request' method must be implemented." 
+ ) # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/INSTALLER b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/LICENSE b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..d645695673349e3947e8e5ae42332d0ac3164cd7 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/METADATA b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..6b35667b89db239d793a621a06dceeda8fab98fb --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/METADATA @@ -0,0 +1,133 @@ +Metadata-Version: 2.1 +Name: importlib_metadata +Version: 8.5.0 +Summary: Read metadata from Python packages +Author-email: "Jason R. 
Coombs" +Project-URL: Source, https://github.com/python/importlib_metadata +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: zipp >=3.20 +Requires-Dist: typing-extensions >=3.6.4 ; python_version < "3.8" +Provides-Extra: check +Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'check' +Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'check' +Provides-Extra: cover +Requires-Dist: pytest-cov ; extra == 'cover' +Provides-Extra: doc +Requires-Dist: sphinx >=3.5 ; extra == 'doc' +Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc' +Requires-Dist: rst.linker >=1.9 ; extra == 'doc' +Requires-Dist: furo ; extra == 'doc' +Requires-Dist: sphinx-lint ; extra == 'doc' +Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc' +Provides-Extra: enabler +Requires-Dist: pytest-enabler >=2.2 ; extra == 'enabler' +Provides-Extra: perf +Requires-Dist: ipython ; extra == 'perf' +Provides-Extra: test +Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test' +Requires-Dist: packaging ; extra == 'test' +Requires-Dist: pyfakefs ; extra == 'test' +Requires-Dist: flufl.flake8 ; extra == 'test' +Requires-Dist: pytest-perf >=0.9.2 ; extra == 'test' +Requires-Dist: jaraco.test >=5.4 ; extra == 'test' +Requires-Dist: importlib-resources >=1.3 ; (python_version < "3.9") and extra == 'test' +Provides-Extra: type +Requires-Dist: pytest-mypy ; extra == 'type' + +.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg + :target: https://pypi.org/project/importlib_metadata + +.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg + +.. 
image:: https://github.com/python/importlib_metadata/actions/workflows/main.yml/badge.svg + :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json + :target: https://github.com/astral-sh/ruff + :alt: Ruff + +.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest + :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2024-informational + :target: https://blog.jaraco.com/skeleton + +.. image:: https://tidelift.com/badges/package/pypi/importlib-metadata + :target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme + +Library to access the metadata for a Python package. + +This package supplies third-party access to the functionality of +`importlib.metadata `_ +including improvements added to subsequent Python versions. + + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython. The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. list-table:: + :header-rows: 1 + + * - importlib_metadata + - stdlib + * - 7.0 + - 3.13 + * - 6.5 + - 3.12 + * - 4.13 + - 3.11 + * - 4.6 + - 3.10 + * - 1.4 + - 3.8 + + +Usage +===== + +See the `online documentation `_ +for usage details. + +`Finder authors +`_ can +also add support for custom package installers. See the above documentation +for details. + + +Caveats +======= + +This project primarily supports third-party packages installed by PyPA +tools (or other conforming packages). It does not support: + +- Packages in the stdlib. +- Packages installed without metadata. 
+ +Project details +=============== + + * Project home: https://github.com/python/importlib_metadata + * Report bugs at: https://github.com/python/importlib_metadata/issues + * Code hosting: https://github.com/python/importlib_metadata + * Documentation: https://importlib-metadata.readthedocs.io/ + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. diff --git a/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/REQUESTED b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/WHEEL b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..0fde4dd96cac9c2431a08860c658f1a5789618f6 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (74.1.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/top_level.txt b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..bbb07547a19c30031d13c45cf01cba61dc434e47 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_metadata diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/analyzer.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/analyzer.py new file mode 100644 index 
0000000000000000000000000000000000000000..ba5fbda0125f6c244a1bab194e43922119b5a32c --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/analyzer.py @@ -0,0 +1,77 @@ +from typing import Dict, Hashable, List +try: + import numpy as np + import numpy.typing as npt +except ImportError as e: + class FormatEnforcerAnalyzer: # type: ignore + def __init__(self, *args, **kwargs): + pass + def report_raw_logits(self, *args, **kwargs): + pass + def generate_report_dict(self, *args, **kwargs): + return {} + raise ImportError('FormatEnforcerAnalyzer not available because numpy is not installed. Please install it with "pip install numpy"') from e + +from . import TokenEnforcer + +class FormatEnforcerAnalyzer: + """A helper class to help analyze the format enforcer's behavior.""" + def __init__(self, token_enforcer: TokenEnforcer): + self.token_enforcer = token_enforcer + self.raw_logits: Dict[Hashable, npt.ArrayLike] = {} + + def report_raw_logits(self, output_tokens: List[int], logits: npt.ArrayLike): + """Report what logits were generated for a specific token sequence. 
The logits must be before any processing / filtering.""" + self.raw_logits[tuple(output_tokens)] = logits + + def generate_report_dict(self, output_tokens: List[int]) -> dict: + """Generate a report dict containing the analysis results for a specific output token sequence.""" + scores_matrix: List[npt.ArrayLike] = [] + allowed_tokens_matrix: List[List[int]] = [] + for idx in range(len(output_tokens)): + prefix = output_tokens[:idx] + prefix_tuple = tuple(prefix) + if prefix_tuple in self.raw_logits: + scores_matrix.append(self.raw_logits[prefix_tuple]) + allowed_tokens_matrix.append(self.token_enforcer.get_allowed_tokens(prefix)) + + logits = np.array(scores_matrix) # n_tokens * vocab_size + softmax_logits = _softmax(logits) # n_tokens * vocab_size + original_indices = softmax_logits.argmax(axis=1) # n_tokens + original_scores = _select_array(softmax_logits, original_indices) # n_tokens + + single_token_dict: Dict[int, str] = {token_id: token_str for token_id, token_str, _ in self.token_enforcer.regular_tokens} + def single_token_decoder(token_id: int) -> str: + if token_id in single_token_dict: + return single_token_dict[token_id] + return self.token_enforcer.decoder([token_id]) + + original_tokens = [single_token_decoder(idx) for idx in original_indices] + + penalty_matrix = np.full_like(softmax_logits, -np.inf) + for row in range(penalty_matrix.shape[0]): + penalty_matrix[row][allowed_tokens_matrix[row]] = 0 + enfored_softmax_logits = softmax_logits + penalty_matrix + + enforced_indices = enfored_softmax_logits.argmax(axis=1) + enforced_scores = _select_array(enfored_softmax_logits, enforced_indices) + + enforced_tokens = [single_token_decoder(idx) for idx in enforced_indices] + df_dict = {} # In order to minimize the package's dependencies, we don't create a dataframe, but create a dataframe-like dictionary instead. 
+ df_dict['generated_token'] = enforced_tokens + df_dict['generated_token_idx'] = enforced_indices.tolist() + df_dict['generated_score'] = enforced_scores.tolist() + df_dict['leading_token'] = original_tokens + df_dict['leading_token_idx'] = original_indices.tolist() + df_dict['leading_score'] = original_scores.tolist() + + return df_dict + +def _softmax(arr: np.ndarray) -> np.ndarray: + """Compute softmax values for each sets of scores in arr.""" + e_arr = np.exp(arr) + return e_arr / np.sum(e_arr, axis=1, keepdims=True) + +def _select_array(arr: np.ndarray, index_array: np.ndarray) -> np.ndarray: + # https://numpy.org/doc/stable/reference/generated/numpy.argmax.html + return np.take_along_axis(arr, np.expand_dims(index_array, axis=-1), axis=-1).squeeze(axis=-1) \ No newline at end of file diff --git a/deepseek/lib/python3.10/site-packages/lmformatenforcer/characterlevelparser.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/characterlevelparser.py new file mode 100644 index 0000000000000000000000000000000000000000..186eab9fcde8a5b45cc228846706cfa39b8930e4 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/characterlevelparser.py @@ -0,0 +1,187 @@ +import abc +import os +from dataclasses import dataclass, field +from typing import Hashable, List, Optional, TypeVar +from .consts import (COMPLETE_ALPHABET, WHITESPACE_CHARACTERS, DEFAULT_MAX_CONSECUTIVE_WHITESPACES, + DEFAULT_FORCE_JSON_FIELD_ORDER, CONFIG_ENV_VAR_MAX_CONSECUTIVE_WHITESPACES, + CONFIG_ENV_VAR_STRICT_JSON_FIELD_ORDER, CONFIG_ENV_VAR_MAX_JSON_ARRAY_LENGTH, + DEFAULT_MAX_JSON_ARRAY_LENGTH) + + +def _parse_bool(s: str) -> bool: + return s and (s.strip().lower() in ['true', '1']) + + +def _env_or_default_field(env_var: str, default_val): + default_val_type = type(default_val) + parser_func = _parse_bool if default_val_type == bool else default_val_type + def factory_func(): + return parser_func(os.environ.get(env_var, str(default_val))) + return 
@dataclass
class CharacterLevelParserConfig:
    """Tunable limits shared by all character-level parsers.

    Each field's default can be overridden through an environment variable
    (resolved lazily at instantiation time via _env_or_default_field).
    """
    alphabet: str = COMPLETE_ALPHABET
    max_consecutive_whitespaces: int = _env_or_default_field(CONFIG_ENV_VAR_MAX_CONSECUTIVE_WHITESPACES,
                                                            DEFAULT_MAX_CONSECUTIVE_WHITESPACES)
    """How many consecutive whitespaces the JsonSchemaParser will allow"""
    force_json_field_order: bool = _env_or_default_field(CONFIG_ENV_VAR_STRICT_JSON_FIELD_ORDER,
                                                         DEFAULT_FORCE_JSON_FIELD_ORDER)
    """Whether the JsonSchemaParser will force fields to appear in the
    order of the 'required' field in the schema"""
    max_json_array_length: int = _env_or_default_field(CONFIG_ENV_VAR_MAX_JSON_ARRAY_LENGTH,
                                                       DEFAULT_MAX_JSON_ARRAY_LENGTH)
    """What is the maximum json array length if not specified by the schema. Helps the LLM
    avoid infinite loops."""
class CharacterLevelParser(abc.ABC):
    """CharacterLevelParser is an interface for classes that can parse strings one character at a time, and determine which characters are allowed at any specific time"""

    def __init__(self, config: Optional[CharacterLevelParserConfig] = None):
        # A default config is created when none is supplied.
        self._config = config or CharacterLevelParserConfig()

    @abc.abstractmethod
    def add_character(self, new_character: str) -> 'CharacterLevelParser':
        """Add a character to the parser, and return a new parser that represents the state of the parser after the character has been added. This has to be
        an immutable operation - the original CharacterLevelParser (self) must not be modified."""
        raise NotImplementedError()

    @abc.abstractmethod
    def get_allowed_characters(self) -> str:
        """Return a string containing all characters that are allowed at the current point in the parsing process."""
        raise NotImplementedError()

    @abc.abstractmethod
    def can_end(self) -> bool:
        """Return True if the parser is in a state where it can end (potentially finished parsing the desired structure), and False otherwise."""
        raise NotImplementedError()

    def shortcut_key(self) -> Optional[Hashable]:
        """Optional. Return a key that denotes that this state is a repeating state, full tree traversal should be avoided."""
        return None

    def cache_key(self) -> Optional[Hashable]:
        """Optional. Return a key that denotes that this state is a repeating state, and if it is visited again, results can be cached."""
        return None

    @property
    def config(self) -> CharacterLevelParserConfig:
        """The active parser configuration."""
        return self._config

    @config.setter
    def config(self, new_config: CharacterLevelParserConfig):
        # Fix: the setter used to 'return self', but Python discards property
        # setter return values, so that was dead code (RegexParser also calls
        # fset() directly and ignores the result).
        self._config = new_config
class StringParser(CharacterLevelParser):
    """StringParser is an example CharacterLevelParser that only allows an exact string. It is a debugging / learning tool
    to show how CharacterLevelParser works together with TokenizerPrefixTree to filter the allowed tokens (some of whom may contain multiple characters)

    (Docstring previously said 'RegexParser' - copy/paste error fixed.)
    """
    def __init__(self, string: str):
        # Remainder of the target string that still has to be matched.
        self.target_str = string

    def add_character(self, new_character: str) -> CharacterLevelParser:
        """Consume new_character if it is the next expected prefix, otherwise raise ValueError."""
        if self.target_str.startswith(new_character):
            return StringParser(self.target_str[len(new_character):])
        # Bug fix: when target_str was already empty, self.target_str[0] raised an
        # IndexError instead of the intended ValueError.
        expected = self.target_str[0] if self.target_str else '<end of string>'
        raise ValueError(f"Expected '{expected}' but got '{new_character}'")

    def get_allowed_characters(self) -> str:
        """The single next expected character, or '' once the full string was consumed."""
        return self.target_str[0] if self.target_str else ""

    def can_end(self) -> bool:
        """True once the entire target string has been consumed."""
        return not self.target_str
class UnionParser(CharacterLevelParser):
    """A parser that allows a string that would be allowed by any of several different parsers"""

    def __init__(self, parsers: List[CharacterLevelParser]):
        self.parsers = parsers

    def add_character(self, new_character: str) -> CharacterLevelParser:
        # This is a bit of a performance hit, as it means get_allowed_characters() is called twice.
        surviving = [p.add_character(new_character)
                     for p in self.parsers
                     if new_character in p.get_allowed_characters()]
        # Collapse to the single branch when the union is no longer ambiguous.
        return surviving[0] if len(surviving) == 1 else UnionParser(surviving)

    def get_allowed_characters(self) -> str:
        combined = "".join(p.get_allowed_characters() for p in self.parsers)
        return "".join(set(combined))

    def can_end(self) -> bool:
        return any(p.can_end() for p in self.parsers)

    def shortcut_key(self) -> Optional[Hashable]:
        # Only meaningful when every branch agrees on the same shortcut key.
        keys = set(p.shortcut_key() for p in self.parsers)
        return next(iter(keys)) if len(keys) == 1 else None

    def cache_key(self) -> Optional[Hashable]:
        child_keys = tuple(p.cache_key() for p in self.parsers)
        if any(key is None for key in child_keys):
            return None
        return ('union', child_keys)
class SequenceParser(CharacterLevelParser):
    """A parser that is a sequence of multiple parsers."""

    def __init__(self, parsers: List[CharacterLevelParser]):
        self.parsers = parsers

    def add_character(self, new_character: str) -> CharacterLevelParser:
        branches = []
        # Tricky edge case: if the first parser can both end and accept the character,
        # and the second parser can also accept, we don't know which scenario we are dealing
        # with, so we need to return a UnionParser.
        for pos, parser in enumerate(self.parsers):
            if new_character in parser.get_allowed_characters():
                advanced = parser.add_character(new_character)
                remaining = [advanced] + self.parsers[pos + 1:]
                branches.append(remaining[0] if len(remaining) == 1 else SequenceParser(remaining))
            if not parser.can_end():
                # Later parsers are unreachable until this one can end.
                break
        return branches[0] if len(branches) == 1 else UnionParser(branches)

    def get_allowed_characters(self) -> str:
        chars = set()
        for parser in self.parsers:
            chars.update(parser.get_allowed_characters())
            if not parser.can_end():
                break
        return "".join(chars)

    def can_end(self) -> bool:
        return all(parser.can_end() for parser in self.parsers)

    def shortcut_key(self) -> Optional[str]:
        return self.parsers[0].shortcut_key() if len(self.parsers) == 1 else None

    def cache_key(self) -> Optional[Hashable]:
        child_keys = tuple(parser.cache_key() for parser in self.parsers)
        if all(key is not None for key in child_keys):
            return ('sequence', child_keys)
        return None
a/deepseek/lib/python3.10/site-packages/lmformatenforcer/external/jsonschemaobjectutil.py b/deepseek/lib/python3.10/site-packages/lmformatenforcer/external/jsonschemaobjectutil.py new file mode 100644 index 0000000000000000000000000000000000000000..f89609c9b6f093ac5e4c5eaccb2efdc453a9173f --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lmformatenforcer/external/jsonschemaobjectutil.py @@ -0,0 +1,231 @@ +# https://github.com/koxudaxi/datamodel-code-generator/blob/master/datamodel_code_generator/util.py +# MIT License + +# Copyright (c) 2019 Koudai Aono + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Callable, TypeVar +from enum import Enum, auto + +import pydantic +from packaging import version +from pydantic import BaseModel as _BaseModel + +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterator, + TypeVar, + Union, +) + +PYDANTIC_VERSION = version.parse( + pydantic.VERSION if isinstance(pydantic.VERSION, str) else str(pydantic.VERSION) +) + +PYDANTIC_V2: bool = PYDANTIC_VERSION >= version.parse('2.0b3') + +if PYDANTIC_V2: + from pydantic import GetCoreSchemaHandler + from pydantic_core import core_schema + +if TYPE_CHECKING: + cached_property = property + from yaml import SafeLoader + + Protocol = object + runtime_checkable: Callable[..., Any] + + from typing_extensions import Literal +else: + try: + from typing import Protocol + except ImportError: + from typing_extensions import Protocol # noqa + try: + from typing import runtime_checkable + except ImportError: + from typing_extensions import runtime_checkable # noqa + try: + from yaml import CSafeLoader as SafeLoader + except ImportError: # pragma: no cover + from yaml import SafeLoader + + try: + from functools import cached_property + except ImportError: + _NOT_FOUND = object() + + class cached_property: + def __init__(self, func: Callable) -> None: + self.func: Callable = func + self.__doc__: Any = func.__doc__ + + def __get__(self, instance: Any, owner: Any = None) -> Any: + value = instance.__dict__.get(self.func.__name__, _NOT_FOUND) + if value is _NOT_FOUND: # pragma: no cover + value = instance.__dict__[self.func.__name__] = self.func(instance) + return value + + +SafeLoader.yaml_constructors[ + 'tag:yaml.org,2002:timestamp' +] = SafeLoader.yaml_constructors['tag:yaml.org,2002:str'] + + +Model = TypeVar('Model', bound=_BaseModel) + + +def model_validator( + mode: Literal['before', 'after'] = 'after', +) -> Callable[[Callable[[Model, Any], Any]], Callable[[Model, Any], Any]]: + def inner(method: 
def is_url(ref: str) -> bool:
    """Return True when ref is an http(s) URL rather than a local JSON reference."""
    url_prefixes = ('https://', 'http://')
    return ref.startswith(url_prefixes)
class UnionIntFloat:
    """Wrapper holding a numeric value that may be either int or float, usable as a pydantic field type
    under both pydantic v1 (__get_validators__) and v2 (__get_pydantic_core_schema__)."""

    def __init__(self, value: Union[int, float]) -> None:
        self.value: Union[int, float] = value

    def __int__(self) -> int:
        return int(self.value)

    def __float__(self) -> float:
        return float(self.value)

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def __get_validators__(cls) -> Iterator[Callable[[Any], Any]]:
        # pydantic v1 validation hook.
        yield cls.validate

    @classmethod
    def __get_pydantic_core_schema__(
        cls, _source_type: Any, _handler: GetCoreSchemaHandler
    ) -> core_schema.CoreSchema:
        # pydantic v2 validation hook: accept int/float, then wrap via validate().
        numeric_then_wrap = core_schema.chain_schema(
            [
                core_schema.union_schema(
                    [core_schema.int_schema(), core_schema.float_schema()]
                ),
                core_schema.no_info_plain_validator_function(cls.validate),
            ]
        )
        python_side = core_schema.union_schema(
            [
                # check if it's an instance first before doing any further work
                core_schema.is_instance_schema(UnionIntFloat),
                numeric_then_wrap,
            ]
        )
        return core_schema.json_or_python_schema(
            json_schema=core_schema.no_info_plain_validator_function(cls.validate),
            python_schema=python_side,
            serialization=core_schema.plain_serializer_function_ser_schema(
                lambda instance: instance.value
            ),
        )

    @classmethod
    def validate(cls, v: Any) -> UnionIntFloat:
        """Coerce v into a UnionIntFloat, rejecting non-numeric types."""
        if isinstance(v, UnionIntFloat):
            return v
        if not isinstance(v, (int, float)):  # pragma: no cover
            raise TypeError(f'{v} is not int or float')
        return cls(v)
def _build_regular_tokens_list(llm: Llama) -> List[Tuple[int, str, bool]]:
    """Build (token_id, decoded_string, is_word_start) triples for every non-special token in the vocabulary.

    Tokens whose bytes cannot be decoded on their own (e.g. partial UTF-8 sequences)
    are mapped to '\ufffd' so they stay usable in JSON free-text fields.
    """
    token_0 = llm.tokenize(b"0")[-1]
    regular_tokens = []
    special_tokens = [llm.token_bos(), llm.token_eos()]
    for token_idx in range(llm.n_vocab()):
        if token_idx in special_tokens:
            continue
        # We prepend token 0 and skip the first letter of the result to get a space if the token is a start word.
        try:
            decoded_after_0 = llm.detokenize([token_0, token_idx]).decode('utf-8')[1:]
            decoded_regular = llm.detokenize([token_idx]).decode('utf-8')
            is_word_start_token = len(decoded_after_0) > len(decoded_regular)
            regular_tokens.append((token_idx, decoded_after_0, is_word_start_token))
        except Exception:
            # Bug fix: was a bare 'except:', which also swallowed KeyboardInterrupt /
            # SystemExit. 'except Exception' keeps the intended fallback behavior.
            # This can happen for cases such as raw bytes outside of the ASCII range. We assign this a value of �,
            # which is what huggingface does for tokens that are meaningless on their own. Allowing this in the
            # json_freetext field will allow the language model to build unicode sequences from multiple tokens
            # in JSON-freetext fields.
            regular_tokens.append((token_idx, '�', False))
    return regular_tokens
class LlamaCppLogitsProcessor:
    """Logits processor that masks out tokens the TokenEnforcer does not allow at the current step."""

    def __init__(self, token_enforcer: TokenEnforcer, analyze):
        self.token_enforcer = token_enforcer
        # Only build an analyzer when diagnostics were requested.
        self.analyzer = FormatEnforcerAnalyzer(token_enforcer) if analyze else None
        # Boolean mask buffer, lazily allocated on first call and reused afterwards.
        self.mask = None

    def __call__(self, input_ids: npt.NDArray[np.intc], scores: npt.NDArray[np.single]) -> npt.NDArray[np.single]:
        tokens_so_far = input_ids.tolist()
        if self.analyzer:
            self.analyzer.report_raw_logits(tokens_so_far, scores.tolist())
        allowed = self.token_enforcer.get_allowed_tokens(tokens_so_far)
        if self.mask is not None:
            self.mask.fill(True)
        else:
            self.mask = np.ones(scores.shape, bool)
        self.mask[allowed] = False
        scores[self.mask] = float('-inf')
        return scores
def build_llamacpp_logits_processor(llm: Union[Llama, TokenEnforcerTokenizerData], character_level_parser: CharacterLevelParser, analyze: bool=False) -> LlamaCppLogitsProcessor:
    """Build the logits processor function that llama.cpp will use to filter the tokens generated by the model. The result
    can be passed in the logits_processor list that is sent to the call or generate() method of llama.cpp models."""
    # Accept either a raw Llama model or pre-built tokenizer data.
    tokenizer_data = build_token_enforcer_tokenizer_data(llm) if isinstance(llm, Llama) else llm
    enforcer = TokenEnforcer(tokenizer_data, character_level_parser)
    return LlamaCppLogitsProcessor(enforcer, analyze)
class LogitsSaverManager:
    """Temporarily hooks the model's private _get_logits_warper so raw logits can be captured for analysis.

    NOTE(review): this patches a private transformers API (_get_logits_warper) - verify against the
    installed transformers version.
    """
    warper: LogitsSaverWarper

    def __init__(self, model: AutoModelForCausalLM, analyzer: FormatEnforcerAnalyzer):
        self.model = model
        self.warper = None
        self.old_warper = None
        self.analyzer = analyzer

    def replace_logits_warper(self, filter_func = None):
        """Install a LogitsSaverWarper (and optionally a prefix-constrained filter) ahead of the model's warpers."""
        self.old_warper = self.model._get_logits_warper

        def patched_get_logits_warper(generation_config):
            warper_list = self.old_warper(generation_config)
            self.warper = LogitsSaverWarper(self.analyzer)
            warper_list.insert(0, self.warper)
            if filter_func is not None:
                warper_list.insert(1, PrefixConstrainedLogitsProcessor(filter_func, 1))
            return warper_list

        self.model._get_logits_warper = patched_get_logits_warper

    def unreplace_logits_warper(self):
        """Restore the original _get_logits_warper."""
        self.model._get_logits_warper = self.old_warper
+ decoded_after_0 = tokenizer.decode([token_0, token_idx])[1:] + decoded_regular = tokenizer.decode([token_idx]) + is_word_start_token = len(decoded_after_0) > len(decoded_regular) + regular_tokens.append((token_idx, decoded_after_0, is_word_start_token)) + return regular_tokens + + +def _decode_function(tokenizer: PreTrainedTokenizerBase, tokens: List[int]) -> str: + decoded = tokenizer.decode(tokens) + cleaned = decoded.rstrip('�') + return cleaned + + +def build_token_enforcer_tokenizer_data(tokenizer: PreTrainedTokenizerBase, + vocab_size: Optional[int] = None) -> TokenEnforcerTokenizerData: + vocab_size = vocab_size or len(tokenizer) + regular_tokens = _build_regular_tokens_list(tokenizer, vocab_size) + decode_fn = functools.partial(_decode_function, tokenizer) + return TokenEnforcerTokenizerData(regular_tokens, decode_fn, tokenizer.eos_token_id) + + +class TransformersPrefixAllowedTokensFn: + def __init__(self, token_enforcer: TokenEnforcer): + self.token_enforcer = token_enforcer + + def __call__(self, batch_id: int, sent: torch.Tensor) -> List[int]: + token_sequence = sent.tolist() + return self.token_enforcer.get_allowed_tokens(token_sequence) + + +def build_transformers_prefix_allowed_tokens_fn(tokenizer_data: Union[PreTrainedTokenizerBase, TokenEnforcerTokenizerData], + character_level_parser: CharacterLevelParser) -> TransformersPrefixAllowedTokensFn: + """Build the prefix allowed tokens function that transformers will use to filter the tokens generated by the model. 
def generate_enforced(model: AutoModelForCausalLM,
                      tokenizer: Union[PreTrainedTokenizerBase, TokenEnforcerTokenizerData],
                      character_level_parser: CharacterLevelParser,
                      **kwargs: dict) -> Union[str, dict]:
    """Generate text from a model while enforcing a given format, generating enforcing diagnostic information.
    This can be used instead of calling model.generate().
    If return_dict_in_generate and output_scores parameters are True, diagnostic information will be returned in the result.
    If you don't need this, consider using prefix_allowed_tokens_fn + build_transformers_prefix_allowed_tokens_fn() instead"""

    prefix_fn = build_transformers_prefix_allowed_tokens_fn(tokenizer, character_level_parser)

    multi_inputs = kwargs['input_ids'].shape[0] > 1
    multi_beams = kwargs.get('num_beams', 1) > 1
    support_diagnostics = not (multi_inputs or multi_beams)  # TODO: Support diagnostics in these cases as well.

    wants_dict = kwargs.get('return_dict_in_generate', False)
    wants_scores = kwargs.get('output_scores', None)

    # We do some internals hacking in order to extract the data needed for diagnostics. If we weren't asked for them,
    # we are better off simply using prefix_allowed_tokens_fn parameter.
    advanced_mode = wants_dict and wants_scores and support_diagnostics

    if not advanced_mode:
        return model.generate(**kwargs, prefix_allowed_tokens_fn=prefix_fn)

    analyzer = FormatEnforcerAnalyzer(prefix_fn.token_enforcer)
    logits_saver = LogitsSaverManager(model, analyzer)
    logits_saver.replace_logits_warper(prefix_fn)
    try:
        output = model.generate(**kwargs)
    finally:
        # Always restore the model's original warper, even if generation raised.
        logits_saver.unreplace_logits_warper()

    output.enforced_scores = analyzer.generate_report_dict(output['sequences'][0].tolist())
    return output
super().__init__(config) + if isinstance(pattern, str): + self.context = RegexParser._Context() + self.context.pattern = interegular.parse_pattern(pattern).to_fsm() + self.context.state_character_cache = {} + self._update_alphabet(self.config.alphabet) + else: + self.context = pattern + self.current_state: int = self.context.pattern.initial if current_state == RegexParser.UNINITIALIZED_STATE else current_state + + def add_character(self, new_character: str) -> 'RegexParser': + if self.current_state == RegexParser.INVALID_STATE: + return self + + state = self.current_state + fsm = self.context.pattern + # Mostly taken from FSM.accept() + symbol = new_character + if anything_else in fsm.alphabet and not symbol in fsm.alphabet: + symbol = anything_else + transition = fsm.alphabet[symbol] + + try: + # Prefer try-catch to checking if transition exists to avoid double lookup perf hit in valid case + state = fsm.map[state][transition] # type: ignore + return RegexParser(self.context, self.config, state) + except KeyError: + # Missing transition = transition to dead state + return RegexParser(self.context, self.config, RegexParser.INVALID_STATE) + + def can_end(self) -> bool: + return self.current_state in self.context.pattern.finals or self.current_state == RegexParser.INVALID_STATE + + def get_allowed_characters(self) -> str: + if self.current_state not in self.context.pattern.map: + return '' + if self.current_state not in self.context.state_character_cache: + allowed_characters = [] + state_map = self.context.pattern.map[self.current_state] + for symbol_idx in state_map: + symbols: List[str] = self.context.pattern.alphabet.by_transition[symbol_idx] + for symbol in symbols: + if symbol == anything_else: + allowed_characters.append(self.context.anything_else_characters) + else: + allowed_characters.append(symbol) + self.context.state_character_cache[self.current_state] = "".join(allowed_characters) + return self.context.state_character_cache[self.current_state] + + def 
    def _update_alphabet(self, new_alphabet: str):
        """Recompute which characters of the parser alphabet are matched by 'anything_else'.

        Characters that appear explicitly in the FSM's alphabet have their own
        transitions; every other character of new_alphabet must be matched by the
        FSM's anything_else transition, so they are cached on the shared context.
        """
        if self.context:
            # Characters with explicit transitions in the FSM alphabet.
            not_anything_else_characters = set([c for c in self.context.pattern.alphabet.keys() if c != anything_else])
            # Everything else in the configured alphabet falls under anything_else.
            self.context.anything_else_characters = "".join([c for c in new_alphabet if c not in not_anything_else_characters])
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS diff --git a/deepseek/lib/python3.10/site-packages/packaging-24.2.dist-info/METADATA b/deepseek/lib/python3.10/site-packages/packaging-24.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..1479c8694bfbd583a896dbe9bd33cdb6d7e7371e --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/packaging-24.2.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.3 +Name: packaging +Version: 24.2 +Summary: Core utilities for Python packages +Author-email: Donald Stufft +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Project-URL: Documentation, https://packaging.pypa.io/ +Project-URL: Source, https://github.com/pypa/packaging + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications `_. + +This library provides utilities that implement the interoperability +specifications which have clearly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. 
end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, utilities. + +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +The ``packaging`` library uses calendar-based versioning (``YY.N``). + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. _`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. 
_`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/deepseek/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af1caa2aea71a5b3d06f1e83d3ce03a365598795 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1d74d07fb876852c3181c7a6b54401a804941c909e42efc31fff34dc31da6c5b +size 100332 diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/__init__.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..62657e39a0c5bc6b8c2e71f83dbf8f2e266626b4 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/__init__.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/ast_parser.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/ast_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..574344d524193ddd7cdc49d5016c59abfca76c99 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/ast_parser.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/mathematica.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/mathematica.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..582e4945de12f2b829d011dd5102b610e7cd99be Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/mathematica.cpython-310.pyc 
differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/maxima.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/maxima.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ff3fce1bb850fdbdfc0e560e3d51ab1076485ef9 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/maxima.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/sym_expr.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/sym_expr.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d95eeaaa423ca98602ded4574683d99882e1ea9b Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/sym_expr.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/sympy_parser.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/sympy_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fad6441b98d569263282f20bd24b3025a67a5a0c Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/sympy_parser.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/LICENSE.txt b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..6bbfda911b2afada41a568218e31a6502dc68f44 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright 2016, latex2sympy + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to 
use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/LaTeX.g4 b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/LaTeX.g4 new file mode 100644 index 0000000000000000000000000000000000000000..fc2c30f9817931e2060b549a39f98a6a4f9cb1f7 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/LaTeX.g4 @@ -0,0 +1,312 @@ +/* + ANTLR4 LaTeX Math Grammar + + Ported from latex2sympy by @augustt198 https://github.com/augustt198/latex2sympy See license in + LICENSE.txt + */ + +/* + After changing this file, it is necessary to run `python setup.py antlr` in the root directory of + the repository. This will regenerate the code in `sympy/parsing/latex/_antlr/*.py`. + */ + +grammar LaTeX; + +options { + language = Python3; +} + +WS: [ \t\r\n]+ -> skip; +THINSPACE: ('\\,' | '\\thinspace') -> skip; +MEDSPACE: ('\\:' | '\\medspace') -> skip; +THICKSPACE: ('\\;' | '\\thickspace') -> skip; +QUAD: '\\quad' -> skip; +QQUAD: '\\qquad' -> skip; +NEGTHINSPACE: ('\\!' 
| '\\negthinspace') -> skip; +NEGMEDSPACE: '\\negmedspace' -> skip; +NEGTHICKSPACE: '\\negthickspace' -> skip; +CMD_LEFT: '\\left' -> skip; +CMD_RIGHT: '\\right' -> skip; + +IGNORE: + ( + '\\vrule' + | '\\vcenter' + | '\\vbox' + | '\\vskip' + | '\\vspace' + | '\\hfil' + | '\\*' + | '\\-' + | '\\.' + | '\\/' + | '\\"' + | '\\(' + | '\\=' + ) -> skip; + +ADD: '+'; +SUB: '-'; +MUL: '*'; +DIV: '/'; + +L_PAREN: '('; +R_PAREN: ')'; +L_BRACE: '{'; +R_BRACE: '}'; +L_BRACE_LITERAL: '\\{'; +R_BRACE_LITERAL: '\\}'; +L_BRACKET: '['; +R_BRACKET: ']'; + +BAR: '|'; + +R_BAR: '\\right|'; +L_BAR: '\\left|'; + +L_ANGLE: '\\langle'; +R_ANGLE: '\\rangle'; +FUNC_LIM: '\\lim'; +LIM_APPROACH_SYM: + '\\to' + | '\\rightarrow' + | '\\Rightarrow' + | '\\longrightarrow' + | '\\Longrightarrow'; +FUNC_INT: + '\\int' + | '\\int\\limits'; +FUNC_SUM: '\\sum'; +FUNC_PROD: '\\prod'; + +FUNC_EXP: '\\exp'; +FUNC_LOG: '\\log'; +FUNC_LG: '\\lg'; +FUNC_LN: '\\ln'; +FUNC_SIN: '\\sin'; +FUNC_COS: '\\cos'; +FUNC_TAN: '\\tan'; +FUNC_CSC: '\\csc'; +FUNC_SEC: '\\sec'; +FUNC_COT: '\\cot'; + +FUNC_ARCSIN: '\\arcsin'; +FUNC_ARCCOS: '\\arccos'; +FUNC_ARCTAN: '\\arctan'; +FUNC_ARCCSC: '\\arccsc'; +FUNC_ARCSEC: '\\arcsec'; +FUNC_ARCCOT: '\\arccot'; + +FUNC_SINH: '\\sinh'; +FUNC_COSH: '\\cosh'; +FUNC_TANH: '\\tanh'; +FUNC_ARSINH: '\\arsinh'; +FUNC_ARCOSH: '\\arcosh'; +FUNC_ARTANH: '\\artanh'; + +L_FLOOR: '\\lfloor'; +R_FLOOR: '\\rfloor'; +L_CEIL: '\\lceil'; +R_CEIL: '\\rceil'; + +FUNC_SQRT: '\\sqrt'; +FUNC_OVERLINE: '\\overline'; + +CMD_TIMES: '\\times'; +CMD_CDOT: '\\cdot'; +CMD_DIV: '\\div'; +CMD_FRAC: + '\\frac' + | '\\dfrac' + | '\\tfrac'; +CMD_BINOM: '\\binom'; +CMD_DBINOM: '\\dbinom'; +CMD_TBINOM: '\\tbinom'; + +CMD_MATHIT: '\\mathit'; + +UNDERSCORE: '_'; +CARET: '^'; +COLON: ':'; + +fragment WS_CHAR: [ \t\r\n]; +DIFFERENTIAL: 'd' WS_CHAR*? ([a-zA-Z] | '\\' [a-zA-Z]+); + +LETTER: [a-zA-Z]; +DIGIT: [0-9]; + +EQUAL: (('&' WS_CHAR*?)? '=') | ('=' (WS_CHAR*? 
'&')?); +NEQ: '\\neq'; + +LT: '<'; +LTE: ('\\leq' | '\\le' | LTE_Q | LTE_S); +LTE_Q: '\\leqq'; +LTE_S: '\\leqslant'; + +GT: '>'; +GTE: ('\\geq' | '\\ge' | GTE_Q | GTE_S); +GTE_Q: '\\geqq'; +GTE_S: '\\geqslant'; + +BANG: '!'; + +SINGLE_QUOTES: '\''+; + +SYMBOL: '\\' [a-zA-Z]+; + +math: relation; + +relation: + relation (EQUAL | LT | LTE | GT | GTE | NEQ) relation + | expr; + +equality: expr EQUAL expr; + +expr: additive; + +additive: additive (ADD | SUB) additive | mp; + +// mult part +mp: + mp (MUL | CMD_TIMES | CMD_CDOT | DIV | CMD_DIV | COLON) mp + | unary; + +mp_nofunc: + mp_nofunc ( + MUL + | CMD_TIMES + | CMD_CDOT + | DIV + | CMD_DIV + | COLON + ) mp_nofunc + | unary_nofunc; + +unary: (ADD | SUB) unary | postfix+; + +unary_nofunc: + (ADD | SUB) unary_nofunc + | postfix postfix_nofunc*; + +postfix: exp postfix_op*; +postfix_nofunc: exp_nofunc postfix_op*; +postfix_op: BANG | eval_at; + +eval_at: + BAR (eval_at_sup | eval_at_sub | eval_at_sup eval_at_sub); + +eval_at_sub: UNDERSCORE L_BRACE (expr | equality) R_BRACE; + +eval_at_sup: CARET L_BRACE (expr | equality) R_BRACE; + +exp: exp CARET (atom | L_BRACE expr R_BRACE) subexpr? | comp; + +exp_nofunc: + exp_nofunc CARET (atom | L_BRACE expr R_BRACE) subexpr? + | comp_nofunc; + +comp: + group + | abs_group + | func + | atom + | floor + | ceil; + +comp_nofunc: + group + | abs_group + | atom + | floor + | ceil; + +group: + L_PAREN expr R_PAREN + | L_BRACKET expr R_BRACKET + | L_BRACE expr R_BRACE + | L_BRACE_LITERAL expr R_BRACE_LITERAL; + +abs_group: BAR expr BAR; + +number: DIGIT+ (',' DIGIT DIGIT DIGIT)* ('.' DIGIT+)?; + +atom: (LETTER | SYMBOL) (subexpr? SINGLE_QUOTES? | SINGLE_QUOTES? subexpr?) 
+ | number + | DIFFERENTIAL + | mathit + | frac + | binom + | bra + | ket; + +bra: L_ANGLE expr (R_BAR | BAR); +ket: (L_BAR | BAR) expr R_ANGLE; + +mathit: CMD_MATHIT L_BRACE mathit_text R_BRACE; +mathit_text: LETTER*; + +frac: CMD_FRAC (upperd = DIGIT | L_BRACE upper = expr R_BRACE) + (lowerd = DIGIT | L_BRACE lower = expr R_BRACE); + +binom: + (CMD_BINOM | CMD_DBINOM | CMD_TBINOM) L_BRACE n = expr R_BRACE L_BRACE k = expr R_BRACE; + +floor: L_FLOOR val = expr R_FLOOR; +ceil: L_CEIL val = expr R_CEIL; + +func_normal: + FUNC_EXP + | FUNC_LOG + | FUNC_LG + | FUNC_LN + | FUNC_SIN + | FUNC_COS + | FUNC_TAN + | FUNC_CSC + | FUNC_SEC + | FUNC_COT + | FUNC_ARCSIN + | FUNC_ARCCOS + | FUNC_ARCTAN + | FUNC_ARCCSC + | FUNC_ARCSEC + | FUNC_ARCCOT + | FUNC_SINH + | FUNC_COSH + | FUNC_TANH + | FUNC_ARSINH + | FUNC_ARCOSH + | FUNC_ARTANH; + +func: + func_normal (subexpr? supexpr? | supexpr? subexpr?) ( + L_PAREN func_arg R_PAREN + | func_arg_noparens + ) + | (LETTER | SYMBOL) (subexpr? SINGLE_QUOTES? | SINGLE_QUOTES? subexpr?) // e.g. f(x), f_1'(x) + L_PAREN args R_PAREN + | FUNC_INT (subexpr supexpr | supexpr subexpr)? ( + additive? DIFFERENTIAL + | frac + | additive + ) + | FUNC_SQRT (L_BRACKET root = expr R_BRACKET)? L_BRACE base = expr R_BRACE + | FUNC_OVERLINE L_BRACE base = expr R_BRACE + | (FUNC_SUM | FUNC_PROD) (subeq supexpr | supexpr subeq) mp + | FUNC_LIM limit_sub mp; + +args: (expr ',' args) | expr; + +limit_sub: + UNDERSCORE L_BRACE (LETTER | SYMBOL) LIM_APPROACH_SYM expr ( + CARET ((L_BRACE (ADD | SUB) R_BRACE) | ADD | SUB) + )? 
R_BRACE; + +func_arg: expr | (expr ',' func_arg); +func_arg_noparens: mp_nofunc; + +subexpr: UNDERSCORE (atom | L_BRACE expr R_BRACE); +supexpr: CARET (atom | L_BRACE expr R_BRACE); + +subeq: UNDERSCORE L_BRACE equality R_BRACE; +supeq: UNDERSCORE L_BRACE equality R_BRACE; diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__init__.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e2cd51429ca2fc013c1d1565695ee9cc938d9de5 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__init__.py @@ -0,0 +1,66 @@ +from sympy.external import import_module +from sympy.utilities.decorator import doctest_depends_on + +from sympy.parsing.latex.lark import LarkLaTeXParser, TransformToSymPyExpr, parse_latex_lark # noqa + +from .errors import LaTeXParsingError # noqa + + +__doctest_requires__ = {('parse_latex',): ['antlr4', 'lark']} + + +@doctest_depends_on(modules=('antlr4', 'lark')) +def parse_latex(s, strict=False, backend="antlr"): + r"""Converts the input LaTeX string ``s`` to a SymPy ``Expr``. + + Parameters + ========== + + s : str + The LaTeX string to parse. In Python source containing LaTeX, + *raw strings* (denoted with ``r"``, like this one) are preferred, + as LaTeX makes liberal use of the ``\`` character, which would + trigger escaping in normal Python strings. + backend : str, optional + Currently, there are two backends supported: ANTLR, and Lark. + The default setting is to use the ANTLR backend, which can be + changed to Lark if preferred. + + Use ``backend="antlr"`` for the ANTLR-based parser, and + ``backend="lark"`` for the Lark-based parser. + + The ``backend`` option is case-sensitive, and must be in + all lowercase. + strict : bool, optional + This option is only available with the ANTLR backend. + + If True, raise an exception if the string cannot be parsed as + valid LaTeX. 
If False, try to recover gracefully from common + mistakes. + + Examples + ======== + + >>> from sympy.parsing.latex import parse_latex + >>> expr = parse_latex(r"\frac {1 + \sqrt {\a}} {\b}") + >>> expr + (sqrt(a) + 1)/b + >>> expr.evalf(4, subs=dict(a=5, b=2)) + 1.618 + >>> func = parse_latex(r"\int_1^\alpha \dfrac{\mathrm{d}t}{t}", backend="lark") + >>> func.evalf(subs={"alpha": 2}) + 0.693147180559945 + """ + + if backend == "antlr": + _latex = import_module( + 'sympy.parsing.latex._parse_latex_antlr', + import_kwargs={'fromlist': ['X']}) + + if _latex is not None: + return _latex.parse_latex(s, strict) + elif backend == "lark": + return parse_latex_lark(s) + else: + raise NotImplementedError(f"Using the '{backend}' backend in the LaTeX" \ + " parser is not supported.") diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/__init__.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e25d615d3fe773887fa372ee922cc58ffdd5e655 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/__init__.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/_build_latex_antlr.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/_build_latex_antlr.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..972eaa2865ddde4d8e83ef57a07850eccfc9bd2d Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/_build_latex_antlr.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/_parse_latex_antlr.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/_parse_latex_antlr.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..44be20ea38e2e483ddbb80cec8bcd8b09220b75b Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/_parse_latex_antlr.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/errors.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/errors.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..82612070fb5ed677e01ceb3d66d1bfc3935e097a Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/errors.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/__init__.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2d690e1eb8631ee7731fc1875769d3a4704a1743 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/__init__.py @@ -0,0 +1,9 @@ +# *** GENERATED BY `setup.py antlr`, DO NOT EDIT BY HAND *** +# +# Generated from ../LaTeX.g4, derived from latex2sympy +# latex2sympy is licensed under the MIT license +# https://github.com/augustt198/latex2sympy/blob/master/LICENSE.txt +# +# Generated with antlr4 +# antlr4 is licensed under the BSD-3-Clause License +# https://github.com/antlr/antlr4/blob/master/LICENSE.txt diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/latexlexer.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/latexlexer.py new file mode 100644 index 0000000000000000000000000000000000000000..46ca959736c967782eef360b9b3268ccd0be0979 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/latexlexer.py @@ -0,0 +1,512 @@ +# *** GENERATED BY `setup.py antlr`, DO NOT EDIT BY HAND *** +# +# Generated from ../LaTeX.g4, derived from latex2sympy +# 
latex2sympy is licensed under the MIT license +# https://github.com/augustt198/latex2sympy/blob/master/LICENSE.txt +# +# Generated with antlr4 +# antlr4 is licensed under the BSD-3-Clause License +# https://github.com/antlr/antlr4/blob/master/LICENSE.txt +from antlr4 import * +from io import StringIO +import sys +if sys.version_info[1] > 5: + from typing import TextIO +else: + from typing.io import TextIO + + +def serializedATN(): + return [ + 4,0,91,911,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5, + 2,6,7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2, + 13,7,13,2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7, + 19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2, + 26,7,26,2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,2,31,7,31,2,32,7, + 32,2,33,7,33,2,34,7,34,2,35,7,35,2,36,7,36,2,37,7,37,2,38,7,38,2, + 39,7,39,2,40,7,40,2,41,7,41,2,42,7,42,2,43,7,43,2,44,7,44,2,45,7, + 45,2,46,7,46,2,47,7,47,2,48,7,48,2,49,7,49,2,50,7,50,2,51,7,51,2, + 52,7,52,2,53,7,53,2,54,7,54,2,55,7,55,2,56,7,56,2,57,7,57,2,58,7, + 58,2,59,7,59,2,60,7,60,2,61,7,61,2,62,7,62,2,63,7,63,2,64,7,64,2, + 65,7,65,2,66,7,66,2,67,7,67,2,68,7,68,2,69,7,69,2,70,7,70,2,71,7, + 71,2,72,7,72,2,73,7,73,2,74,7,74,2,75,7,75,2,76,7,76,2,77,7,77,2, + 78,7,78,2,79,7,79,2,80,7,80,2,81,7,81,2,82,7,82,2,83,7,83,2,84,7, + 84,2,85,7,85,2,86,7,86,2,87,7,87,2,88,7,88,2,89,7,89,2,90,7,90,2, + 91,7,91,1,0,1,0,1,1,1,1,1,2,4,2,191,8,2,11,2,12,2,192,1,2,1,2,1, + 3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,3,3,209,8,3,1,3,1, + 3,1,4,1,4,1,4,1,4,1,4,1,4,1,4,1,4,1,4,1,4,1,4,3,4,224,8,4,1,4,1, + 4,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,3,5,241,8, + 5,1,5,1,5,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,7,1,7,1,7,1,7,1,7,1, + 7,1,7,1,7,1,7,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1, + 8,1,8,1,8,3,8,277,8,8,1,8,1,8,1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9,1, + 9,1,9,1,9,1,9,1,9,1,9,1,9,1,10,1,10,1,10,1,10,1,10,1,10,1,10,1,10, + 
1,10,1,10,1,10,1,10,1,10,1,10,1,10,1,10,1,10,1,11,1,11,1,11,1,11, + 1,11,1,11,1,11,1,11,1,12,1,12,1,12,1,12,1,12,1,12,1,12,1,12,1,12, + 1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13, + 1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13, + 1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13, + 1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,3,13, + 381,8,13,1,13,1,13,1,14,1,14,1,15,1,15,1,16,1,16,1,17,1,17,1,18, + 1,18,1,19,1,19,1,20,1,20,1,21,1,21,1,22,1,22,1,22,1,23,1,23,1,23, + 1,24,1,24,1,25,1,25,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,27,1,27, + 1,27,1,28,1,28,1,28,1,28,1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,29, + 1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,30,1,30,1,30,1,31,1,31, + 1,31,1,31,1,31,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32, + 1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32, + 1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32, + 1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32, + 1,32,1,32,1,32,1,32,1,32,1,32,3,32,504,8,32,1,33,1,33,1,33,1,33, + 1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,3,33,521, + 8,33,1,34,1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,1,35,1,36, + 1,36,1,36,1,36,1,36,1,37,1,37,1,37,1,37,1,37,1,38,1,38,1,38,1,38, + 1,39,1,39,1,39,1,39,1,40,1,40,1,40,1,40,1,40,1,41,1,41,1,41,1,41, + 1,41,1,42,1,42,1,42,1,42,1,42,1,43,1,43,1,43,1,43,1,43,1,44,1,44, + 1,44,1,44,1,44,1,45,1,45,1,45,1,45,1,45,1,46,1,46,1,46,1,46,1,46, + 1,46,1,46,1,46,1,47,1,47,1,47,1,47,1,47,1,47,1,47,1,47,1,48,1,48, + 1,48,1,48,1,48,1,48,1,48,1,48,1,49,1,49,1,49,1,49,1,49,1,49,1,49, + 1,49,1,50,1,50,1,50,1,50,1,50,1,50,1,50,1,50,1,51,1,51,1,51,1,51, + 1,51,1,51,1,51,1,51,1,52,1,52,1,52,1,52,1,52,1,52,1,53,1,53,1,53, + 1,53,1,53,1,53,1,54,1,54,1,54,1,54,1,54,1,54,1,55,1,55,1,55,1,55, + 1,55,1,55,1,55,1,55,1,56,1,56,1,56,1,56,1,56,1,56,1,56,1,56,1,57, + 1,57,1,57,1,57,1,57,1,57,1,57,1,57,1,58,1,58,1,58,1,58,1,58,1,58, + 
1,58,1,58,1,59,1,59,1,59,1,59,1,59,1,59,1,59,1,59,1,60,1,60,1,60, + 1,60,1,60,1,60,1,60,1,61,1,61,1,61,1,61,1,61,1,61,1,61,1,62,1,62, + 1,62,1,62,1,62,1,62,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63, + 1,63,1,64,1,64,1,64,1,64,1,64,1,64,1,64,1,65,1,65,1,65,1,65,1,65, + 1,65,1,66,1,66,1,66,1,66,1,66,1,67,1,67,1,67,1,67,1,67,1,67,1,67, + 1,67,1,67,1,67,1,67,1,67,1,67,1,67,1,67,1,67,1,67,3,67,753,8,67, + 1,68,1,68,1,68,1,68,1,68,1,68,1,68,1,69,1,69,1,69,1,69,1,69,1,69, + 1,69,1,69,1,70,1,70,1,70,1,70,1,70,1,70,1,70,1,70,1,71,1,71,1,71, + 1,71,1,71,1,71,1,71,1,71,1,72,1,72,1,73,1,73,1,74,1,74,1,75,1,75, + 1,76,1,76,5,76,796,8,76,10,76,12,76,799,9,76,1,76,1,76,1,76,4,76, + 804,8,76,11,76,12,76,805,3,76,808,8,76,1,77,1,77,1,78,1,78,1,79, + 1,79,5,79,816,8,79,10,79,12,79,819,9,79,3,79,821,8,79,1,79,1,79, + 1,79,5,79,826,8,79,10,79,12,79,829,9,79,1,79,3,79,832,8,79,3,79, + 834,8,79,1,80,1,80,1,80,1,80,1,80,1,81,1,81,1,82,1,82,1,82,1,82, + 1,82,1,82,1,82,1,82,1,82,3,82,852,8,82,1,83,1,83,1,83,1,83,1,83, + 1,83,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,85,1,85, + 1,86,1,86,1,86,1,86,1,86,1,86,1,86,1,86,1,86,3,86,881,8,86,1,87, + 1,87,1,87,1,87,1,87,1,87,1,88,1,88,1,88,1,88,1,88,1,88,1,88,1,88, + 1,88,1,88,1,89,1,89,1,90,4,90,902,8,90,11,90,12,90,903,1,91,1,91, + 4,91,908,8,91,11,91,12,91,909,3,797,817,827,0,92,1,1,3,2,5,3,7,4, + 9,5,11,6,13,7,15,8,17,9,19,10,21,11,23,12,25,13,27,14,29,15,31,16, + 33,17,35,18,37,19,39,20,41,21,43,22,45,23,47,24,49,25,51,26,53,27, + 55,28,57,29,59,30,61,31,63,32,65,33,67,34,69,35,71,36,73,37,75,38, + 77,39,79,40,81,41,83,42,85,43,87,44,89,45,91,46,93,47,95,48,97,49, + 99,50,101,51,103,52,105,53,107,54,109,55,111,56,113,57,115,58,117, + 59,119,60,121,61,123,62,125,63,127,64,129,65,131,66,133,67,135,68, + 137,69,139,70,141,71,143,72,145,73,147,74,149,75,151,0,153,76,155, + 77,157,78,159,79,161,80,163,81,165,82,167,83,169,84,171,85,173,86, + 175,87,177,88,179,89,181,90,183,91,1,0,3,3,0,9,10,13,13,32,32,2, + 
0,65,90,97,122,1,0,48,57,949,0,1,1,0,0,0,0,3,1,0,0,0,0,5,1,0,0,0, + 0,7,1,0,0,0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0, + 17,1,0,0,0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1,0,0,0,0,25,1,0,0,0,0, + 27,1,0,0,0,0,29,1,0,0,0,0,31,1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,0,0, + 37,1,0,0,0,0,39,1,0,0,0,0,41,1,0,0,0,0,43,1,0,0,0,0,45,1,0,0,0,0, + 47,1,0,0,0,0,49,1,0,0,0,0,51,1,0,0,0,0,53,1,0,0,0,0,55,1,0,0,0,0, + 57,1,0,0,0,0,59,1,0,0,0,0,61,1,0,0,0,0,63,1,0,0,0,0,65,1,0,0,0,0, + 67,1,0,0,0,0,69,1,0,0,0,0,71,1,0,0,0,0,73,1,0,0,0,0,75,1,0,0,0,0, + 77,1,0,0,0,0,79,1,0,0,0,0,81,1,0,0,0,0,83,1,0,0,0,0,85,1,0,0,0,0, + 87,1,0,0,0,0,89,1,0,0,0,0,91,1,0,0,0,0,93,1,0,0,0,0,95,1,0,0,0,0, + 97,1,0,0,0,0,99,1,0,0,0,0,101,1,0,0,0,0,103,1,0,0,0,0,105,1,0,0, + 0,0,107,1,0,0,0,0,109,1,0,0,0,0,111,1,0,0,0,0,113,1,0,0,0,0,115, + 1,0,0,0,0,117,1,0,0,0,0,119,1,0,0,0,0,121,1,0,0,0,0,123,1,0,0,0, + 0,125,1,0,0,0,0,127,1,0,0,0,0,129,1,0,0,0,0,131,1,0,0,0,0,133,1, + 0,0,0,0,135,1,0,0,0,0,137,1,0,0,0,0,139,1,0,0,0,0,141,1,0,0,0,0, + 143,1,0,0,0,0,145,1,0,0,0,0,147,1,0,0,0,0,149,1,0,0,0,0,153,1,0, + 0,0,0,155,1,0,0,0,0,157,1,0,0,0,0,159,1,0,0,0,0,161,1,0,0,0,0,163, + 1,0,0,0,0,165,1,0,0,0,0,167,1,0,0,0,0,169,1,0,0,0,0,171,1,0,0,0, + 0,173,1,0,0,0,0,175,1,0,0,0,0,177,1,0,0,0,0,179,1,0,0,0,0,181,1, + 0,0,0,0,183,1,0,0,0,1,185,1,0,0,0,3,187,1,0,0,0,5,190,1,0,0,0,7, + 208,1,0,0,0,9,223,1,0,0,0,11,240,1,0,0,0,13,244,1,0,0,0,15,252,1, + 0,0,0,17,276,1,0,0,0,19,280,1,0,0,0,21,295,1,0,0,0,23,312,1,0,0, + 0,25,320,1,0,0,0,27,380,1,0,0,0,29,384,1,0,0,0,31,386,1,0,0,0,33, + 388,1,0,0,0,35,390,1,0,0,0,37,392,1,0,0,0,39,394,1,0,0,0,41,396, + 1,0,0,0,43,398,1,0,0,0,45,400,1,0,0,0,47,403,1,0,0,0,49,406,1,0, + 0,0,51,408,1,0,0,0,53,410,1,0,0,0,55,412,1,0,0,0,57,420,1,0,0,0, + 59,427,1,0,0,0,61,435,1,0,0,0,63,443,1,0,0,0,65,503,1,0,0,0,67,520, + 1,0,0,0,69,522,1,0,0,0,71,527,1,0,0,0,73,533,1,0,0,0,75,538,1,0, + 0,0,77,543,1,0,0,0,79,547,1,0,0,0,81,551,1,0,0,0,83,556,1,0,0,0, + 
85,561,1,0,0,0,87,566,1,0,0,0,89,571,1,0,0,0,91,576,1,0,0,0,93,581, + 1,0,0,0,95,589,1,0,0,0,97,597,1,0,0,0,99,605,1,0,0,0,101,613,1,0, + 0,0,103,621,1,0,0,0,105,629,1,0,0,0,107,635,1,0,0,0,109,641,1,0, + 0,0,111,647,1,0,0,0,113,655,1,0,0,0,115,663,1,0,0,0,117,671,1,0, + 0,0,119,679,1,0,0,0,121,687,1,0,0,0,123,694,1,0,0,0,125,701,1,0, + 0,0,127,707,1,0,0,0,129,717,1,0,0,0,131,724,1,0,0,0,133,730,1,0, + 0,0,135,752,1,0,0,0,137,754,1,0,0,0,139,761,1,0,0,0,141,769,1,0, + 0,0,143,777,1,0,0,0,145,785,1,0,0,0,147,787,1,0,0,0,149,789,1,0, + 0,0,151,791,1,0,0,0,153,793,1,0,0,0,155,809,1,0,0,0,157,811,1,0, + 0,0,159,833,1,0,0,0,161,835,1,0,0,0,163,840,1,0,0,0,165,851,1,0, + 0,0,167,853,1,0,0,0,169,859,1,0,0,0,171,869,1,0,0,0,173,880,1,0, + 0,0,175,882,1,0,0,0,177,888,1,0,0,0,179,898,1,0,0,0,181,901,1,0, + 0,0,183,905,1,0,0,0,185,186,5,44,0,0,186,2,1,0,0,0,187,188,5,46, + 0,0,188,4,1,0,0,0,189,191,7,0,0,0,190,189,1,0,0,0,191,192,1,0,0, + 0,192,190,1,0,0,0,192,193,1,0,0,0,193,194,1,0,0,0,194,195,6,2,0, + 0,195,6,1,0,0,0,196,197,5,92,0,0,197,209,5,44,0,0,198,199,5,92,0, + 0,199,200,5,116,0,0,200,201,5,104,0,0,201,202,5,105,0,0,202,203, + 5,110,0,0,203,204,5,115,0,0,204,205,5,112,0,0,205,206,5,97,0,0,206, + 207,5,99,0,0,207,209,5,101,0,0,208,196,1,0,0,0,208,198,1,0,0,0,209, + 210,1,0,0,0,210,211,6,3,0,0,211,8,1,0,0,0,212,213,5,92,0,0,213,224, + 5,58,0,0,214,215,5,92,0,0,215,216,5,109,0,0,216,217,5,101,0,0,217, + 218,5,100,0,0,218,219,5,115,0,0,219,220,5,112,0,0,220,221,5,97,0, + 0,221,222,5,99,0,0,222,224,5,101,0,0,223,212,1,0,0,0,223,214,1,0, + 0,0,224,225,1,0,0,0,225,226,6,4,0,0,226,10,1,0,0,0,227,228,5,92, + 0,0,228,241,5,59,0,0,229,230,5,92,0,0,230,231,5,116,0,0,231,232, + 5,104,0,0,232,233,5,105,0,0,233,234,5,99,0,0,234,235,5,107,0,0,235, + 236,5,115,0,0,236,237,5,112,0,0,237,238,5,97,0,0,238,239,5,99,0, + 0,239,241,5,101,0,0,240,227,1,0,0,0,240,229,1,0,0,0,241,242,1,0, + 0,0,242,243,6,5,0,0,243,12,1,0,0,0,244,245,5,92,0,0,245,246,5,113, + 
0,0,246,247,5,117,0,0,247,248,5,97,0,0,248,249,5,100,0,0,249,250, + 1,0,0,0,250,251,6,6,0,0,251,14,1,0,0,0,252,253,5,92,0,0,253,254, + 5,113,0,0,254,255,5,113,0,0,255,256,5,117,0,0,256,257,5,97,0,0,257, + 258,5,100,0,0,258,259,1,0,0,0,259,260,6,7,0,0,260,16,1,0,0,0,261, + 262,5,92,0,0,262,277,5,33,0,0,263,264,5,92,0,0,264,265,5,110,0,0, + 265,266,5,101,0,0,266,267,5,103,0,0,267,268,5,116,0,0,268,269,5, + 104,0,0,269,270,5,105,0,0,270,271,5,110,0,0,271,272,5,115,0,0,272, + 273,5,112,0,0,273,274,5,97,0,0,274,275,5,99,0,0,275,277,5,101,0, + 0,276,261,1,0,0,0,276,263,1,0,0,0,277,278,1,0,0,0,278,279,6,8,0, + 0,279,18,1,0,0,0,280,281,5,92,0,0,281,282,5,110,0,0,282,283,5,101, + 0,0,283,284,5,103,0,0,284,285,5,109,0,0,285,286,5,101,0,0,286,287, + 5,100,0,0,287,288,5,115,0,0,288,289,5,112,0,0,289,290,5,97,0,0,290, + 291,5,99,0,0,291,292,5,101,0,0,292,293,1,0,0,0,293,294,6,9,0,0,294, + 20,1,0,0,0,295,296,5,92,0,0,296,297,5,110,0,0,297,298,5,101,0,0, + 298,299,5,103,0,0,299,300,5,116,0,0,300,301,5,104,0,0,301,302,5, + 105,0,0,302,303,5,99,0,0,303,304,5,107,0,0,304,305,5,115,0,0,305, + 306,5,112,0,0,306,307,5,97,0,0,307,308,5,99,0,0,308,309,5,101,0, + 0,309,310,1,0,0,0,310,311,6,10,0,0,311,22,1,0,0,0,312,313,5,92,0, + 0,313,314,5,108,0,0,314,315,5,101,0,0,315,316,5,102,0,0,316,317, + 5,116,0,0,317,318,1,0,0,0,318,319,6,11,0,0,319,24,1,0,0,0,320,321, + 5,92,0,0,321,322,5,114,0,0,322,323,5,105,0,0,323,324,5,103,0,0,324, + 325,5,104,0,0,325,326,5,116,0,0,326,327,1,0,0,0,327,328,6,12,0,0, + 328,26,1,0,0,0,329,330,5,92,0,0,330,331,5,118,0,0,331,332,5,114, + 0,0,332,333,5,117,0,0,333,334,5,108,0,0,334,381,5,101,0,0,335,336, + 5,92,0,0,336,337,5,118,0,0,337,338,5,99,0,0,338,339,5,101,0,0,339, + 340,5,110,0,0,340,341,5,116,0,0,341,342,5,101,0,0,342,381,5,114, + 0,0,343,344,5,92,0,0,344,345,5,118,0,0,345,346,5,98,0,0,346,347, + 5,111,0,0,347,381,5,120,0,0,348,349,5,92,0,0,349,350,5,118,0,0,350, + 351,5,115,0,0,351,352,5,107,0,0,352,353,5,105,0,0,353,381,5,112, + 
0,0,354,355,5,92,0,0,355,356,5,118,0,0,356,357,5,115,0,0,357,358, + 5,112,0,0,358,359,5,97,0,0,359,360,5,99,0,0,360,381,5,101,0,0,361, + 362,5,92,0,0,362,363,5,104,0,0,363,364,5,102,0,0,364,365,5,105,0, + 0,365,381,5,108,0,0,366,367,5,92,0,0,367,381,5,42,0,0,368,369,5, + 92,0,0,369,381,5,45,0,0,370,371,5,92,0,0,371,381,5,46,0,0,372,373, + 5,92,0,0,373,381,5,47,0,0,374,375,5,92,0,0,375,381,5,34,0,0,376, + 377,5,92,0,0,377,381,5,40,0,0,378,379,5,92,0,0,379,381,5,61,0,0, + 380,329,1,0,0,0,380,335,1,0,0,0,380,343,1,0,0,0,380,348,1,0,0,0, + 380,354,1,0,0,0,380,361,1,0,0,0,380,366,1,0,0,0,380,368,1,0,0,0, + 380,370,1,0,0,0,380,372,1,0,0,0,380,374,1,0,0,0,380,376,1,0,0,0, + 380,378,1,0,0,0,381,382,1,0,0,0,382,383,6,13,0,0,383,28,1,0,0,0, + 384,385,5,43,0,0,385,30,1,0,0,0,386,387,5,45,0,0,387,32,1,0,0,0, + 388,389,5,42,0,0,389,34,1,0,0,0,390,391,5,47,0,0,391,36,1,0,0,0, + 392,393,5,40,0,0,393,38,1,0,0,0,394,395,5,41,0,0,395,40,1,0,0,0, + 396,397,5,123,0,0,397,42,1,0,0,0,398,399,5,125,0,0,399,44,1,0,0, + 0,400,401,5,92,0,0,401,402,5,123,0,0,402,46,1,0,0,0,403,404,5,92, + 0,0,404,405,5,125,0,0,405,48,1,0,0,0,406,407,5,91,0,0,407,50,1,0, + 0,0,408,409,5,93,0,0,409,52,1,0,0,0,410,411,5,124,0,0,411,54,1,0, + 0,0,412,413,5,92,0,0,413,414,5,114,0,0,414,415,5,105,0,0,415,416, + 5,103,0,0,416,417,5,104,0,0,417,418,5,116,0,0,418,419,5,124,0,0, + 419,56,1,0,0,0,420,421,5,92,0,0,421,422,5,108,0,0,422,423,5,101, + 0,0,423,424,5,102,0,0,424,425,5,116,0,0,425,426,5,124,0,0,426,58, + 1,0,0,0,427,428,5,92,0,0,428,429,5,108,0,0,429,430,5,97,0,0,430, + 431,5,110,0,0,431,432,5,103,0,0,432,433,5,108,0,0,433,434,5,101, + 0,0,434,60,1,0,0,0,435,436,5,92,0,0,436,437,5,114,0,0,437,438,5, + 97,0,0,438,439,5,110,0,0,439,440,5,103,0,0,440,441,5,108,0,0,441, + 442,5,101,0,0,442,62,1,0,0,0,443,444,5,92,0,0,444,445,5,108,0,0, + 445,446,5,105,0,0,446,447,5,109,0,0,447,64,1,0,0,0,448,449,5,92, + 0,0,449,450,5,116,0,0,450,504,5,111,0,0,451,452,5,92,0,0,452,453, + 
5,114,0,0,453,454,5,105,0,0,454,455,5,103,0,0,455,456,5,104,0,0, + 456,457,5,116,0,0,457,458,5,97,0,0,458,459,5,114,0,0,459,460,5,114, + 0,0,460,461,5,111,0,0,461,504,5,119,0,0,462,463,5,92,0,0,463,464, + 5,82,0,0,464,465,5,105,0,0,465,466,5,103,0,0,466,467,5,104,0,0,467, + 468,5,116,0,0,468,469,5,97,0,0,469,470,5,114,0,0,470,471,5,114,0, + 0,471,472,5,111,0,0,472,504,5,119,0,0,473,474,5,92,0,0,474,475,5, + 108,0,0,475,476,5,111,0,0,476,477,5,110,0,0,477,478,5,103,0,0,478, + 479,5,114,0,0,479,480,5,105,0,0,480,481,5,103,0,0,481,482,5,104, + 0,0,482,483,5,116,0,0,483,484,5,97,0,0,484,485,5,114,0,0,485,486, + 5,114,0,0,486,487,5,111,0,0,487,504,5,119,0,0,488,489,5,92,0,0,489, + 490,5,76,0,0,490,491,5,111,0,0,491,492,5,110,0,0,492,493,5,103,0, + 0,493,494,5,114,0,0,494,495,5,105,0,0,495,496,5,103,0,0,496,497, + 5,104,0,0,497,498,5,116,0,0,498,499,5,97,0,0,499,500,5,114,0,0,500, + 501,5,114,0,0,501,502,5,111,0,0,502,504,5,119,0,0,503,448,1,0,0, + 0,503,451,1,0,0,0,503,462,1,0,0,0,503,473,1,0,0,0,503,488,1,0,0, + 0,504,66,1,0,0,0,505,506,5,92,0,0,506,507,5,105,0,0,507,508,5,110, + 0,0,508,521,5,116,0,0,509,510,5,92,0,0,510,511,5,105,0,0,511,512, + 5,110,0,0,512,513,5,116,0,0,513,514,5,92,0,0,514,515,5,108,0,0,515, + 516,5,105,0,0,516,517,5,109,0,0,517,518,5,105,0,0,518,519,5,116, + 0,0,519,521,5,115,0,0,520,505,1,0,0,0,520,509,1,0,0,0,521,68,1,0, + 0,0,522,523,5,92,0,0,523,524,5,115,0,0,524,525,5,117,0,0,525,526, + 5,109,0,0,526,70,1,0,0,0,527,528,5,92,0,0,528,529,5,112,0,0,529, + 530,5,114,0,0,530,531,5,111,0,0,531,532,5,100,0,0,532,72,1,0,0,0, + 533,534,5,92,0,0,534,535,5,101,0,0,535,536,5,120,0,0,536,537,5,112, + 0,0,537,74,1,0,0,0,538,539,5,92,0,0,539,540,5,108,0,0,540,541,5, + 111,0,0,541,542,5,103,0,0,542,76,1,0,0,0,543,544,5,92,0,0,544,545, + 5,108,0,0,545,546,5,103,0,0,546,78,1,0,0,0,547,548,5,92,0,0,548, + 549,5,108,0,0,549,550,5,110,0,0,550,80,1,0,0,0,551,552,5,92,0,0, + 552,553,5,115,0,0,553,554,5,105,0,0,554,555,5,110,0,0,555,82,1,0, + 
0,0,556,557,5,92,0,0,557,558,5,99,0,0,558,559,5,111,0,0,559,560, + 5,115,0,0,560,84,1,0,0,0,561,562,5,92,0,0,562,563,5,116,0,0,563, + 564,5,97,0,0,564,565,5,110,0,0,565,86,1,0,0,0,566,567,5,92,0,0,567, + 568,5,99,0,0,568,569,5,115,0,0,569,570,5,99,0,0,570,88,1,0,0,0,571, + 572,5,92,0,0,572,573,5,115,0,0,573,574,5,101,0,0,574,575,5,99,0, + 0,575,90,1,0,0,0,576,577,5,92,0,0,577,578,5,99,0,0,578,579,5,111, + 0,0,579,580,5,116,0,0,580,92,1,0,0,0,581,582,5,92,0,0,582,583,5, + 97,0,0,583,584,5,114,0,0,584,585,5,99,0,0,585,586,5,115,0,0,586, + 587,5,105,0,0,587,588,5,110,0,0,588,94,1,0,0,0,589,590,5,92,0,0, + 590,591,5,97,0,0,591,592,5,114,0,0,592,593,5,99,0,0,593,594,5,99, + 0,0,594,595,5,111,0,0,595,596,5,115,0,0,596,96,1,0,0,0,597,598,5, + 92,0,0,598,599,5,97,0,0,599,600,5,114,0,0,600,601,5,99,0,0,601,602, + 5,116,0,0,602,603,5,97,0,0,603,604,5,110,0,0,604,98,1,0,0,0,605, + 606,5,92,0,0,606,607,5,97,0,0,607,608,5,114,0,0,608,609,5,99,0,0, + 609,610,5,99,0,0,610,611,5,115,0,0,611,612,5,99,0,0,612,100,1,0, + 0,0,613,614,5,92,0,0,614,615,5,97,0,0,615,616,5,114,0,0,616,617, + 5,99,0,0,617,618,5,115,0,0,618,619,5,101,0,0,619,620,5,99,0,0,620, + 102,1,0,0,0,621,622,5,92,0,0,622,623,5,97,0,0,623,624,5,114,0,0, + 624,625,5,99,0,0,625,626,5,99,0,0,626,627,5,111,0,0,627,628,5,116, + 0,0,628,104,1,0,0,0,629,630,5,92,0,0,630,631,5,115,0,0,631,632,5, + 105,0,0,632,633,5,110,0,0,633,634,5,104,0,0,634,106,1,0,0,0,635, + 636,5,92,0,0,636,637,5,99,0,0,637,638,5,111,0,0,638,639,5,115,0, + 0,639,640,5,104,0,0,640,108,1,0,0,0,641,642,5,92,0,0,642,643,5,116, + 0,0,643,644,5,97,0,0,644,645,5,110,0,0,645,646,5,104,0,0,646,110, + 1,0,0,0,647,648,5,92,0,0,648,649,5,97,0,0,649,650,5,114,0,0,650, + 651,5,115,0,0,651,652,5,105,0,0,652,653,5,110,0,0,653,654,5,104, + 0,0,654,112,1,0,0,0,655,656,5,92,0,0,656,657,5,97,0,0,657,658,5, + 114,0,0,658,659,5,99,0,0,659,660,5,111,0,0,660,661,5,115,0,0,661, + 662,5,104,0,0,662,114,1,0,0,0,663,664,5,92,0,0,664,665,5,97,0,0, + 
665,666,5,114,0,0,666,667,5,116,0,0,667,668,5,97,0,0,668,669,5,110, + 0,0,669,670,5,104,0,0,670,116,1,0,0,0,671,672,5,92,0,0,672,673,5, + 108,0,0,673,674,5,102,0,0,674,675,5,108,0,0,675,676,5,111,0,0,676, + 677,5,111,0,0,677,678,5,114,0,0,678,118,1,0,0,0,679,680,5,92,0,0, + 680,681,5,114,0,0,681,682,5,102,0,0,682,683,5,108,0,0,683,684,5, + 111,0,0,684,685,5,111,0,0,685,686,5,114,0,0,686,120,1,0,0,0,687, + 688,5,92,0,0,688,689,5,108,0,0,689,690,5,99,0,0,690,691,5,101,0, + 0,691,692,5,105,0,0,692,693,5,108,0,0,693,122,1,0,0,0,694,695,5, + 92,0,0,695,696,5,114,0,0,696,697,5,99,0,0,697,698,5,101,0,0,698, + 699,5,105,0,0,699,700,5,108,0,0,700,124,1,0,0,0,701,702,5,92,0,0, + 702,703,5,115,0,0,703,704,5,113,0,0,704,705,5,114,0,0,705,706,5, + 116,0,0,706,126,1,0,0,0,707,708,5,92,0,0,708,709,5,111,0,0,709,710, + 5,118,0,0,710,711,5,101,0,0,711,712,5,114,0,0,712,713,5,108,0,0, + 713,714,5,105,0,0,714,715,5,110,0,0,715,716,5,101,0,0,716,128,1, + 0,0,0,717,718,5,92,0,0,718,719,5,116,0,0,719,720,5,105,0,0,720,721, + 5,109,0,0,721,722,5,101,0,0,722,723,5,115,0,0,723,130,1,0,0,0,724, + 725,5,92,0,0,725,726,5,99,0,0,726,727,5,100,0,0,727,728,5,111,0, + 0,728,729,5,116,0,0,729,132,1,0,0,0,730,731,5,92,0,0,731,732,5,100, + 0,0,732,733,5,105,0,0,733,734,5,118,0,0,734,134,1,0,0,0,735,736, + 5,92,0,0,736,737,5,102,0,0,737,738,5,114,0,0,738,739,5,97,0,0,739, + 753,5,99,0,0,740,741,5,92,0,0,741,742,5,100,0,0,742,743,5,102,0, + 0,743,744,5,114,0,0,744,745,5,97,0,0,745,753,5,99,0,0,746,747,5, + 92,0,0,747,748,5,116,0,0,748,749,5,102,0,0,749,750,5,114,0,0,750, + 751,5,97,0,0,751,753,5,99,0,0,752,735,1,0,0,0,752,740,1,0,0,0,752, + 746,1,0,0,0,753,136,1,0,0,0,754,755,5,92,0,0,755,756,5,98,0,0,756, + 757,5,105,0,0,757,758,5,110,0,0,758,759,5,111,0,0,759,760,5,109, + 0,0,760,138,1,0,0,0,761,762,5,92,0,0,762,763,5,100,0,0,763,764,5, + 98,0,0,764,765,5,105,0,0,765,766,5,110,0,0,766,767,5,111,0,0,767, + 768,5,109,0,0,768,140,1,0,0,0,769,770,5,92,0,0,770,771,5,116,0,0, + 
771,772,5,98,0,0,772,773,5,105,0,0,773,774,5,110,0,0,774,775,5,111, + 0,0,775,776,5,109,0,0,776,142,1,0,0,0,777,778,5,92,0,0,778,779,5, + 109,0,0,779,780,5,97,0,0,780,781,5,116,0,0,781,782,5,104,0,0,782, + 783,5,105,0,0,783,784,5,116,0,0,784,144,1,0,0,0,785,786,5,95,0,0, + 786,146,1,0,0,0,787,788,5,94,0,0,788,148,1,0,0,0,789,790,5,58,0, + 0,790,150,1,0,0,0,791,792,7,0,0,0,792,152,1,0,0,0,793,797,5,100, + 0,0,794,796,3,151,75,0,795,794,1,0,0,0,796,799,1,0,0,0,797,798,1, + 0,0,0,797,795,1,0,0,0,798,807,1,0,0,0,799,797,1,0,0,0,800,808,7, + 1,0,0,801,803,5,92,0,0,802,804,7,1,0,0,803,802,1,0,0,0,804,805,1, + 0,0,0,805,803,1,0,0,0,805,806,1,0,0,0,806,808,1,0,0,0,807,800,1, + 0,0,0,807,801,1,0,0,0,808,154,1,0,0,0,809,810,7,1,0,0,810,156,1, + 0,0,0,811,812,7,2,0,0,812,158,1,0,0,0,813,817,5,38,0,0,814,816,3, + 151,75,0,815,814,1,0,0,0,816,819,1,0,0,0,817,818,1,0,0,0,817,815, + 1,0,0,0,818,821,1,0,0,0,819,817,1,0,0,0,820,813,1,0,0,0,820,821, + 1,0,0,0,821,822,1,0,0,0,822,834,5,61,0,0,823,831,5,61,0,0,824,826, + 3,151,75,0,825,824,1,0,0,0,826,829,1,0,0,0,827,828,1,0,0,0,827,825, + 1,0,0,0,828,830,1,0,0,0,829,827,1,0,0,0,830,832,5,38,0,0,831,827, + 1,0,0,0,831,832,1,0,0,0,832,834,1,0,0,0,833,820,1,0,0,0,833,823, + 1,0,0,0,834,160,1,0,0,0,835,836,5,92,0,0,836,837,5,110,0,0,837,838, + 5,101,0,0,838,839,5,113,0,0,839,162,1,0,0,0,840,841,5,60,0,0,841, + 164,1,0,0,0,842,843,5,92,0,0,843,844,5,108,0,0,844,845,5,101,0,0, + 845,852,5,113,0,0,846,847,5,92,0,0,847,848,5,108,0,0,848,852,5,101, + 0,0,849,852,3,167,83,0,850,852,3,169,84,0,851,842,1,0,0,0,851,846, + 1,0,0,0,851,849,1,0,0,0,851,850,1,0,0,0,852,166,1,0,0,0,853,854, + 5,92,0,0,854,855,5,108,0,0,855,856,5,101,0,0,856,857,5,113,0,0,857, + 858,5,113,0,0,858,168,1,0,0,0,859,860,5,92,0,0,860,861,5,108,0,0, + 861,862,5,101,0,0,862,863,5,113,0,0,863,864,5,115,0,0,864,865,5, + 108,0,0,865,866,5,97,0,0,866,867,5,110,0,0,867,868,5,116,0,0,868, + 170,1,0,0,0,869,870,5,62,0,0,870,172,1,0,0,0,871,872,5,92,0,0,872, + 
873,5,103,0,0,873,874,5,101,0,0,874,881,5,113,0,0,875,876,5,92,0, + 0,876,877,5,103,0,0,877,881,5,101,0,0,878,881,3,175,87,0,879,881, + 3,177,88,0,880,871,1,0,0,0,880,875,1,0,0,0,880,878,1,0,0,0,880,879, + 1,0,0,0,881,174,1,0,0,0,882,883,5,92,0,0,883,884,5,103,0,0,884,885, + 5,101,0,0,885,886,5,113,0,0,886,887,5,113,0,0,887,176,1,0,0,0,888, + 889,5,92,0,0,889,890,5,103,0,0,890,891,5,101,0,0,891,892,5,113,0, + 0,892,893,5,115,0,0,893,894,5,108,0,0,894,895,5,97,0,0,895,896,5, + 110,0,0,896,897,5,116,0,0,897,178,1,0,0,0,898,899,5,33,0,0,899,180, + 1,0,0,0,900,902,5,39,0,0,901,900,1,0,0,0,902,903,1,0,0,0,903,901, + 1,0,0,0,903,904,1,0,0,0,904,182,1,0,0,0,905,907,5,92,0,0,906,908, + 7,1,0,0,907,906,1,0,0,0,908,909,1,0,0,0,909,907,1,0,0,0,909,910, + 1,0,0,0,910,184,1,0,0,0,22,0,192,208,223,240,276,380,503,520,752, + 797,805,807,817,820,827,831,833,851,880,903,909,1,6,0,0 + ] + +class LaTeXLexer(Lexer): + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + T__0 = 1 + T__1 = 2 + WS = 3 + THINSPACE = 4 + MEDSPACE = 5 + THICKSPACE = 6 + QUAD = 7 + QQUAD = 8 + NEGTHINSPACE = 9 + NEGMEDSPACE = 10 + NEGTHICKSPACE = 11 + CMD_LEFT = 12 + CMD_RIGHT = 13 + IGNORE = 14 + ADD = 15 + SUB = 16 + MUL = 17 + DIV = 18 + L_PAREN = 19 + R_PAREN = 20 + L_BRACE = 21 + R_BRACE = 22 + L_BRACE_LITERAL = 23 + R_BRACE_LITERAL = 24 + L_BRACKET = 25 + R_BRACKET = 26 + BAR = 27 + R_BAR = 28 + L_BAR = 29 + L_ANGLE = 30 + R_ANGLE = 31 + FUNC_LIM = 32 + LIM_APPROACH_SYM = 33 + FUNC_INT = 34 + FUNC_SUM = 35 + FUNC_PROD = 36 + FUNC_EXP = 37 + FUNC_LOG = 38 + FUNC_LG = 39 + FUNC_LN = 40 + FUNC_SIN = 41 + FUNC_COS = 42 + FUNC_TAN = 43 + FUNC_CSC = 44 + FUNC_SEC = 45 + FUNC_COT = 46 + FUNC_ARCSIN = 47 + FUNC_ARCCOS = 48 + FUNC_ARCTAN = 49 + FUNC_ARCCSC = 50 + FUNC_ARCSEC = 51 + FUNC_ARCCOT = 52 + FUNC_SINH = 53 + FUNC_COSH = 54 + FUNC_TANH = 55 + FUNC_ARSINH = 56 + FUNC_ARCOSH = 57 + FUNC_ARTANH = 58 + L_FLOOR = 59 + 
R_FLOOR = 60 + L_CEIL = 61 + R_CEIL = 62 + FUNC_SQRT = 63 + FUNC_OVERLINE = 64 + CMD_TIMES = 65 + CMD_CDOT = 66 + CMD_DIV = 67 + CMD_FRAC = 68 + CMD_BINOM = 69 + CMD_DBINOM = 70 + CMD_TBINOM = 71 + CMD_MATHIT = 72 + UNDERSCORE = 73 + CARET = 74 + COLON = 75 + DIFFERENTIAL = 76 + LETTER = 77 + DIGIT = 78 + EQUAL = 79 + NEQ = 80 + LT = 81 + LTE = 82 + LTE_Q = 83 + LTE_S = 84 + GT = 85 + GTE = 86 + GTE_Q = 87 + GTE_S = 88 + BANG = 89 + SINGLE_QUOTES = 90 + SYMBOL = 91 + + channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] + + modeNames = [ "DEFAULT_MODE" ] + + literalNames = [ "", + "','", "'.'", "'\\quad'", "'\\qquad'", "'\\negmedspace'", "'\\negthickspace'", + "'\\left'", "'\\right'", "'+'", "'-'", "'*'", "'/'", "'('", + "')'", "'{'", "'}'", "'\\{'", "'\\}'", "'['", "']'", "'|'", + "'\\right|'", "'\\left|'", "'\\langle'", "'\\rangle'", "'\\lim'", + "'\\sum'", "'\\prod'", "'\\exp'", "'\\log'", "'\\lg'", "'\\ln'", + "'\\sin'", "'\\cos'", "'\\tan'", "'\\csc'", "'\\sec'", "'\\cot'", + "'\\arcsin'", "'\\arccos'", "'\\arctan'", "'\\arccsc'", "'\\arcsec'", + "'\\arccot'", "'\\sinh'", "'\\cosh'", "'\\tanh'", "'\\arsinh'", + "'\\arcosh'", "'\\artanh'", "'\\lfloor'", "'\\rfloor'", "'\\lceil'", + "'\\rceil'", "'\\sqrt'", "'\\overline'", "'\\times'", "'\\cdot'", + "'\\div'", "'\\binom'", "'\\dbinom'", "'\\tbinom'", "'\\mathit'", + "'_'", "'^'", "':'", "'\\neq'", "'<'", "'\\leqq'", "'\\leqslant'", + "'>'", "'\\geqq'", "'\\geqslant'", "'!'" ] + + symbolicNames = [ "", + "WS", "THINSPACE", "MEDSPACE", "THICKSPACE", "QUAD", "QQUAD", + "NEGTHINSPACE", "NEGMEDSPACE", "NEGTHICKSPACE", "CMD_LEFT", + "CMD_RIGHT", "IGNORE", "ADD", "SUB", "MUL", "DIV", "L_PAREN", + "R_PAREN", "L_BRACE", "R_BRACE", "L_BRACE_LITERAL", "R_BRACE_LITERAL", + "L_BRACKET", "R_BRACKET", "BAR", "R_BAR", "L_BAR", "L_ANGLE", + "R_ANGLE", "FUNC_LIM", "LIM_APPROACH_SYM", "FUNC_INT", "FUNC_SUM", + "FUNC_PROD", "FUNC_EXP", "FUNC_LOG", "FUNC_LG", "FUNC_LN", "FUNC_SIN", + "FUNC_COS", "FUNC_TAN", "FUNC_CSC", 
"FUNC_SEC", "FUNC_COT", + "FUNC_ARCSIN", "FUNC_ARCCOS", "FUNC_ARCTAN", "FUNC_ARCCSC", + "FUNC_ARCSEC", "FUNC_ARCCOT", "FUNC_SINH", "FUNC_COSH", "FUNC_TANH", + "FUNC_ARSINH", "FUNC_ARCOSH", "FUNC_ARTANH", "L_FLOOR", "R_FLOOR", + "L_CEIL", "R_CEIL", "FUNC_SQRT", "FUNC_OVERLINE", "CMD_TIMES", + "CMD_CDOT", "CMD_DIV", "CMD_FRAC", "CMD_BINOM", "CMD_DBINOM", + "CMD_TBINOM", "CMD_MATHIT", "UNDERSCORE", "CARET", "COLON", + "DIFFERENTIAL", "LETTER", "DIGIT", "EQUAL", "NEQ", "LT", "LTE", + "LTE_Q", "LTE_S", "GT", "GTE", "GTE_Q", "GTE_S", "BANG", "SINGLE_QUOTES", + "SYMBOL" ] + + ruleNames = [ "T__0", "T__1", "WS", "THINSPACE", "MEDSPACE", "THICKSPACE", + "QUAD", "QQUAD", "NEGTHINSPACE", "NEGMEDSPACE", "NEGTHICKSPACE", + "CMD_LEFT", "CMD_RIGHT", "IGNORE", "ADD", "SUB", "MUL", + "DIV", "L_PAREN", "R_PAREN", "L_BRACE", "R_BRACE", "L_BRACE_LITERAL", + "R_BRACE_LITERAL", "L_BRACKET", "R_BRACKET", "BAR", "R_BAR", + "L_BAR", "L_ANGLE", "R_ANGLE", "FUNC_LIM", "LIM_APPROACH_SYM", + "FUNC_INT", "FUNC_SUM", "FUNC_PROD", "FUNC_EXP", "FUNC_LOG", + "FUNC_LG", "FUNC_LN", "FUNC_SIN", "FUNC_COS", "FUNC_TAN", + "FUNC_CSC", "FUNC_SEC", "FUNC_COT", "FUNC_ARCSIN", "FUNC_ARCCOS", + "FUNC_ARCTAN", "FUNC_ARCCSC", "FUNC_ARCSEC", "FUNC_ARCCOT", + "FUNC_SINH", "FUNC_COSH", "FUNC_TANH", "FUNC_ARSINH", + "FUNC_ARCOSH", "FUNC_ARTANH", "L_FLOOR", "R_FLOOR", "L_CEIL", + "R_CEIL", "FUNC_SQRT", "FUNC_OVERLINE", "CMD_TIMES", "CMD_CDOT", + "CMD_DIV", "CMD_FRAC", "CMD_BINOM", "CMD_DBINOM", "CMD_TBINOM", + "CMD_MATHIT", "UNDERSCORE", "CARET", "COLON", "WS_CHAR", + "DIFFERENTIAL", "LETTER", "DIGIT", "EQUAL", "NEQ", "LT", + "LTE", "LTE_Q", "LTE_S", "GT", "GTE", "GTE_Q", "GTE_S", + "BANG", "SINGLE_QUOTES", "SYMBOL" ] + + grammarFileName = "LaTeX.g4" + + def __init__(self, input=None, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.11.1") + self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) + self._actions = None + self._predicates = 
None + + diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/latexparser.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/latexparser.py new file mode 100644 index 0000000000000000000000000000000000000000..f6f58119055ded8f77380bbef52c77ddd6a01cfe --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_antlr/latexparser.py @@ -0,0 +1,3652 @@ +# *** GENERATED BY `setup.py antlr`, DO NOT EDIT BY HAND *** +# +# Generated from ../LaTeX.g4, derived from latex2sympy +# latex2sympy is licensed under the MIT license +# https://github.com/augustt198/latex2sympy/blob/master/LICENSE.txt +# +# Generated with antlr4 +# antlr4 is licensed under the BSD-3-Clause License +# https://github.com/antlr/antlr4/blob/master/LICENSE.txt +from antlr4 import * +from io import StringIO +import sys +if sys.version_info[1] > 5: + from typing import TextIO +else: + from typing.io import TextIO + +def serializedATN(): + return [ + 4,1,91,522,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, + 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, + 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, + 2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,2,31,7,31,2,32,7,32,2,33, + 7,33,2,34,7,34,2,35,7,35,2,36,7,36,2,37,7,37,2,38,7,38,2,39,7,39, + 2,40,7,40,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,1,5,1,91,8,1,10,1,12,1,94, + 9,1,1,2,1,2,1,2,1,2,1,3,1,3,1,4,1,4,1,4,1,4,1,4,1,4,5,4,108,8,4, + 10,4,12,4,111,9,4,1,5,1,5,1,5,1,5,1,5,1,5,5,5,119,8,5,10,5,12,5, + 122,9,5,1,6,1,6,1,6,1,6,1,6,1,6,5,6,130,8,6,10,6,12,6,133,9,6,1, + 7,1,7,1,7,4,7,138,8,7,11,7,12,7,139,3,7,142,8,7,1,8,1,8,1,8,1,8, + 5,8,148,8,8,10,8,12,8,151,9,8,3,8,153,8,8,1,9,1,9,5,9,157,8,9,10, + 9,12,9,160,9,9,1,10,1,10,5,10,164,8,10,10,10,12,10,167,9,10,1,11, + 1,11,3,11,171,8,11,1,12,1,12,1,12,1,12,1,12,1,12,3,12,179,8,12,1, + 
13,1,13,1,13,1,13,3,13,185,8,13,1,13,1,13,1,14,1,14,1,14,1,14,3, + 14,193,8,14,1,14,1,14,1,15,1,15,1,15,1,15,1,15,1,15,1,15,1,15,1, + 15,1,15,3,15,207,8,15,1,15,3,15,210,8,15,5,15,212,8,15,10,15,12, + 15,215,9,15,1,16,1,16,1,16,1,16,1,16,1,16,1,16,1,16,1,16,1,16,3, + 16,227,8,16,1,16,3,16,230,8,16,5,16,232,8,16,10,16,12,16,235,9,16, + 1,17,1,17,1,17,1,17,1,17,1,17,3,17,243,8,17,1,18,1,18,1,18,1,18, + 1,18,3,18,250,8,18,1,19,1,19,1,19,1,19,1,19,1,19,1,19,1,19,1,19, + 1,19,1,19,1,19,1,19,1,19,1,19,1,19,3,19,268,8,19,1,20,1,20,1,20, + 1,20,1,21,4,21,275,8,21,11,21,12,21,276,1,21,1,21,1,21,1,21,5,21, + 283,8,21,10,21,12,21,286,9,21,1,21,1,21,4,21,290,8,21,11,21,12,21, + 291,3,21,294,8,21,1,22,1,22,3,22,298,8,22,1,22,3,22,301,8,22,1,22, + 3,22,304,8,22,1,22,3,22,307,8,22,3,22,309,8,22,1,22,1,22,1,22,1, + 22,1,22,1,22,1,22,3,22,318,8,22,1,23,1,23,1,23,1,23,1,24,1,24,1, + 24,1,24,1,25,1,25,1,25,1,25,1,25,1,26,5,26,334,8,26,10,26,12,26, + 337,9,26,1,27,1,27,1,27,1,27,1,27,1,27,3,27,345,8,27,1,27,1,27,1, + 27,1,27,1,27,3,27,352,8,27,1,28,1,28,1,28,1,28,1,28,1,28,1,28,1, + 28,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,31,1,31,1,32,1,32,3, + 32,374,8,32,1,32,3,32,377,8,32,1,32,3,32,380,8,32,1,32,3,32,383, + 8,32,3,32,385,8,32,1,32,1,32,1,32,1,32,1,32,3,32,392,8,32,1,32,1, + 32,3,32,396,8,32,1,32,3,32,399,8,32,1,32,3,32,402,8,32,1,32,3,32, + 405,8,32,3,32,407,8,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1, + 32,1,32,1,32,3,32,420,8,32,1,32,3,32,423,8,32,1,32,1,32,1,32,3,32, + 428,8,32,1,32,1,32,1,32,1,32,1,32,3,32,435,8,32,1,32,1,32,1,32,1, + 32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,3, + 32,453,8,32,1,32,1,32,1,32,1,32,1,32,1,32,3,32,461,8,32,1,33,1,33, + 1,33,1,33,1,33,3,33,468,8,33,1,34,1,34,1,34,1,34,1,34,1,34,1,34, + 1,34,1,34,1,34,1,34,3,34,481,8,34,3,34,483,8,34,1,34,1,34,1,35,1, + 35,1,35,1,35,1,35,3,35,492,8,35,1,36,1,36,1,37,1,37,1,37,1,37,1, + 37,1,37,3,37,502,8,37,1,38,1,38,1,38,1,38,1,38,1,38,3,38,510,8,38, + 
1,39,1,39,1,39,1,39,1,39,1,40,1,40,1,40,1,40,1,40,1,40,0,6,2,8,10, + 12,30,32,41,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36, + 38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72,74,76,78,80, + 0,9,2,0,79,82,85,86,1,0,15,16,3,0,17,18,65,67,75,75,2,0,77,77,91, + 91,1,0,27,28,2,0,27,27,29,29,1,0,69,71,1,0,37,58,1,0,35,36,563,0, + 82,1,0,0,0,2,84,1,0,0,0,4,95,1,0,0,0,6,99,1,0,0,0,8,101,1,0,0,0, + 10,112,1,0,0,0,12,123,1,0,0,0,14,141,1,0,0,0,16,152,1,0,0,0,18,154, + 1,0,0,0,20,161,1,0,0,0,22,170,1,0,0,0,24,172,1,0,0,0,26,180,1,0, + 0,0,28,188,1,0,0,0,30,196,1,0,0,0,32,216,1,0,0,0,34,242,1,0,0,0, + 36,249,1,0,0,0,38,267,1,0,0,0,40,269,1,0,0,0,42,274,1,0,0,0,44,317, + 1,0,0,0,46,319,1,0,0,0,48,323,1,0,0,0,50,327,1,0,0,0,52,335,1,0, + 0,0,54,338,1,0,0,0,56,353,1,0,0,0,58,361,1,0,0,0,60,365,1,0,0,0, + 62,369,1,0,0,0,64,460,1,0,0,0,66,467,1,0,0,0,68,469,1,0,0,0,70,491, + 1,0,0,0,72,493,1,0,0,0,74,495,1,0,0,0,76,503,1,0,0,0,78,511,1,0, + 0,0,80,516,1,0,0,0,82,83,3,2,1,0,83,1,1,0,0,0,84,85,6,1,-1,0,85, + 86,3,6,3,0,86,92,1,0,0,0,87,88,10,2,0,0,88,89,7,0,0,0,89,91,3,2, + 1,3,90,87,1,0,0,0,91,94,1,0,0,0,92,90,1,0,0,0,92,93,1,0,0,0,93,3, + 1,0,0,0,94,92,1,0,0,0,95,96,3,6,3,0,96,97,5,79,0,0,97,98,3,6,3,0, + 98,5,1,0,0,0,99,100,3,8,4,0,100,7,1,0,0,0,101,102,6,4,-1,0,102,103, + 3,10,5,0,103,109,1,0,0,0,104,105,10,2,0,0,105,106,7,1,0,0,106,108, + 3,8,4,3,107,104,1,0,0,0,108,111,1,0,0,0,109,107,1,0,0,0,109,110, + 1,0,0,0,110,9,1,0,0,0,111,109,1,0,0,0,112,113,6,5,-1,0,113,114,3, + 14,7,0,114,120,1,0,0,0,115,116,10,2,0,0,116,117,7,2,0,0,117,119, + 3,10,5,3,118,115,1,0,0,0,119,122,1,0,0,0,120,118,1,0,0,0,120,121, + 1,0,0,0,121,11,1,0,0,0,122,120,1,0,0,0,123,124,6,6,-1,0,124,125, + 3,16,8,0,125,131,1,0,0,0,126,127,10,2,0,0,127,128,7,2,0,0,128,130, + 3,12,6,3,129,126,1,0,0,0,130,133,1,0,0,0,131,129,1,0,0,0,131,132, + 1,0,0,0,132,13,1,0,0,0,133,131,1,0,0,0,134,135,7,1,0,0,135,142,3, + 14,7,0,136,138,3,18,9,0,137,136,1,0,0,0,138,139,1,0,0,0,139,137, + 
1,0,0,0,139,140,1,0,0,0,140,142,1,0,0,0,141,134,1,0,0,0,141,137, + 1,0,0,0,142,15,1,0,0,0,143,144,7,1,0,0,144,153,3,16,8,0,145,149, + 3,18,9,0,146,148,3,20,10,0,147,146,1,0,0,0,148,151,1,0,0,0,149,147, + 1,0,0,0,149,150,1,0,0,0,150,153,1,0,0,0,151,149,1,0,0,0,152,143, + 1,0,0,0,152,145,1,0,0,0,153,17,1,0,0,0,154,158,3,30,15,0,155,157, + 3,22,11,0,156,155,1,0,0,0,157,160,1,0,0,0,158,156,1,0,0,0,158,159, + 1,0,0,0,159,19,1,0,0,0,160,158,1,0,0,0,161,165,3,32,16,0,162,164, + 3,22,11,0,163,162,1,0,0,0,164,167,1,0,0,0,165,163,1,0,0,0,165,166, + 1,0,0,0,166,21,1,0,0,0,167,165,1,0,0,0,168,171,5,89,0,0,169,171, + 3,24,12,0,170,168,1,0,0,0,170,169,1,0,0,0,171,23,1,0,0,0,172,178, + 5,27,0,0,173,179,3,28,14,0,174,179,3,26,13,0,175,176,3,28,14,0,176, + 177,3,26,13,0,177,179,1,0,0,0,178,173,1,0,0,0,178,174,1,0,0,0,178, + 175,1,0,0,0,179,25,1,0,0,0,180,181,5,73,0,0,181,184,5,21,0,0,182, + 185,3,6,3,0,183,185,3,4,2,0,184,182,1,0,0,0,184,183,1,0,0,0,185, + 186,1,0,0,0,186,187,5,22,0,0,187,27,1,0,0,0,188,189,5,74,0,0,189, + 192,5,21,0,0,190,193,3,6,3,0,191,193,3,4,2,0,192,190,1,0,0,0,192, + 191,1,0,0,0,193,194,1,0,0,0,194,195,5,22,0,0,195,29,1,0,0,0,196, + 197,6,15,-1,0,197,198,3,34,17,0,198,213,1,0,0,0,199,200,10,2,0,0, + 200,206,5,74,0,0,201,207,3,44,22,0,202,203,5,21,0,0,203,204,3,6, + 3,0,204,205,5,22,0,0,205,207,1,0,0,0,206,201,1,0,0,0,206,202,1,0, + 0,0,207,209,1,0,0,0,208,210,3,74,37,0,209,208,1,0,0,0,209,210,1, + 0,0,0,210,212,1,0,0,0,211,199,1,0,0,0,212,215,1,0,0,0,213,211,1, + 0,0,0,213,214,1,0,0,0,214,31,1,0,0,0,215,213,1,0,0,0,216,217,6,16, + -1,0,217,218,3,36,18,0,218,233,1,0,0,0,219,220,10,2,0,0,220,226, + 5,74,0,0,221,227,3,44,22,0,222,223,5,21,0,0,223,224,3,6,3,0,224, + 225,5,22,0,0,225,227,1,0,0,0,226,221,1,0,0,0,226,222,1,0,0,0,227, + 229,1,0,0,0,228,230,3,74,37,0,229,228,1,0,0,0,229,230,1,0,0,0,230, + 232,1,0,0,0,231,219,1,0,0,0,232,235,1,0,0,0,233,231,1,0,0,0,233, + 234,1,0,0,0,234,33,1,0,0,0,235,233,1,0,0,0,236,243,3,38,19,0,237, + 
243,3,40,20,0,238,243,3,64,32,0,239,243,3,44,22,0,240,243,3,58,29, + 0,241,243,3,60,30,0,242,236,1,0,0,0,242,237,1,0,0,0,242,238,1,0, + 0,0,242,239,1,0,0,0,242,240,1,0,0,0,242,241,1,0,0,0,243,35,1,0,0, + 0,244,250,3,38,19,0,245,250,3,40,20,0,246,250,3,44,22,0,247,250, + 3,58,29,0,248,250,3,60,30,0,249,244,1,0,0,0,249,245,1,0,0,0,249, + 246,1,0,0,0,249,247,1,0,0,0,249,248,1,0,0,0,250,37,1,0,0,0,251,252, + 5,19,0,0,252,253,3,6,3,0,253,254,5,20,0,0,254,268,1,0,0,0,255,256, + 5,25,0,0,256,257,3,6,3,0,257,258,5,26,0,0,258,268,1,0,0,0,259,260, + 5,21,0,0,260,261,3,6,3,0,261,262,5,22,0,0,262,268,1,0,0,0,263,264, + 5,23,0,0,264,265,3,6,3,0,265,266,5,24,0,0,266,268,1,0,0,0,267,251, + 1,0,0,0,267,255,1,0,0,0,267,259,1,0,0,0,267,263,1,0,0,0,268,39,1, + 0,0,0,269,270,5,27,0,0,270,271,3,6,3,0,271,272,5,27,0,0,272,41,1, + 0,0,0,273,275,5,78,0,0,274,273,1,0,0,0,275,276,1,0,0,0,276,274,1, + 0,0,0,276,277,1,0,0,0,277,284,1,0,0,0,278,279,5,1,0,0,279,280,5, + 78,0,0,280,281,5,78,0,0,281,283,5,78,0,0,282,278,1,0,0,0,283,286, + 1,0,0,0,284,282,1,0,0,0,284,285,1,0,0,0,285,293,1,0,0,0,286,284, + 1,0,0,0,287,289,5,2,0,0,288,290,5,78,0,0,289,288,1,0,0,0,290,291, + 1,0,0,0,291,289,1,0,0,0,291,292,1,0,0,0,292,294,1,0,0,0,293,287, + 1,0,0,0,293,294,1,0,0,0,294,43,1,0,0,0,295,308,7,3,0,0,296,298,3, + 74,37,0,297,296,1,0,0,0,297,298,1,0,0,0,298,300,1,0,0,0,299,301, + 5,90,0,0,300,299,1,0,0,0,300,301,1,0,0,0,301,309,1,0,0,0,302,304, + 5,90,0,0,303,302,1,0,0,0,303,304,1,0,0,0,304,306,1,0,0,0,305,307, + 3,74,37,0,306,305,1,0,0,0,306,307,1,0,0,0,307,309,1,0,0,0,308,297, + 1,0,0,0,308,303,1,0,0,0,309,318,1,0,0,0,310,318,3,42,21,0,311,318, + 5,76,0,0,312,318,3,50,25,0,313,318,3,54,27,0,314,318,3,56,28,0,315, + 318,3,46,23,0,316,318,3,48,24,0,317,295,1,0,0,0,317,310,1,0,0,0, + 317,311,1,0,0,0,317,312,1,0,0,0,317,313,1,0,0,0,317,314,1,0,0,0, + 317,315,1,0,0,0,317,316,1,0,0,0,318,45,1,0,0,0,319,320,5,30,0,0, + 320,321,3,6,3,0,321,322,7,4,0,0,322,47,1,0,0,0,323,324,7,5,0,0,324, + 
325,3,6,3,0,325,326,5,31,0,0,326,49,1,0,0,0,327,328,5,72,0,0,328, + 329,5,21,0,0,329,330,3,52,26,0,330,331,5,22,0,0,331,51,1,0,0,0,332, + 334,5,77,0,0,333,332,1,0,0,0,334,337,1,0,0,0,335,333,1,0,0,0,335, + 336,1,0,0,0,336,53,1,0,0,0,337,335,1,0,0,0,338,344,5,68,0,0,339, + 345,5,78,0,0,340,341,5,21,0,0,341,342,3,6,3,0,342,343,5,22,0,0,343, + 345,1,0,0,0,344,339,1,0,0,0,344,340,1,0,0,0,345,351,1,0,0,0,346, + 352,5,78,0,0,347,348,5,21,0,0,348,349,3,6,3,0,349,350,5,22,0,0,350, + 352,1,0,0,0,351,346,1,0,0,0,351,347,1,0,0,0,352,55,1,0,0,0,353,354, + 7,6,0,0,354,355,5,21,0,0,355,356,3,6,3,0,356,357,5,22,0,0,357,358, + 5,21,0,0,358,359,3,6,3,0,359,360,5,22,0,0,360,57,1,0,0,0,361,362, + 5,59,0,0,362,363,3,6,3,0,363,364,5,60,0,0,364,59,1,0,0,0,365,366, + 5,61,0,0,366,367,3,6,3,0,367,368,5,62,0,0,368,61,1,0,0,0,369,370, + 7,7,0,0,370,63,1,0,0,0,371,384,3,62,31,0,372,374,3,74,37,0,373,372, + 1,0,0,0,373,374,1,0,0,0,374,376,1,0,0,0,375,377,3,76,38,0,376,375, + 1,0,0,0,376,377,1,0,0,0,377,385,1,0,0,0,378,380,3,76,38,0,379,378, + 1,0,0,0,379,380,1,0,0,0,380,382,1,0,0,0,381,383,3,74,37,0,382,381, + 1,0,0,0,382,383,1,0,0,0,383,385,1,0,0,0,384,373,1,0,0,0,384,379, + 1,0,0,0,385,391,1,0,0,0,386,387,5,19,0,0,387,388,3,70,35,0,388,389, + 5,20,0,0,389,392,1,0,0,0,390,392,3,72,36,0,391,386,1,0,0,0,391,390, + 1,0,0,0,392,461,1,0,0,0,393,406,7,3,0,0,394,396,3,74,37,0,395,394, + 1,0,0,0,395,396,1,0,0,0,396,398,1,0,0,0,397,399,5,90,0,0,398,397, + 1,0,0,0,398,399,1,0,0,0,399,407,1,0,0,0,400,402,5,90,0,0,401,400, + 1,0,0,0,401,402,1,0,0,0,402,404,1,0,0,0,403,405,3,74,37,0,404,403, + 1,0,0,0,404,405,1,0,0,0,405,407,1,0,0,0,406,395,1,0,0,0,406,401, + 1,0,0,0,407,408,1,0,0,0,408,409,5,19,0,0,409,410,3,66,33,0,410,411, + 5,20,0,0,411,461,1,0,0,0,412,419,5,34,0,0,413,414,3,74,37,0,414, + 415,3,76,38,0,415,420,1,0,0,0,416,417,3,76,38,0,417,418,3,74,37, + 0,418,420,1,0,0,0,419,413,1,0,0,0,419,416,1,0,0,0,419,420,1,0,0, + 0,420,427,1,0,0,0,421,423,3,8,4,0,422,421,1,0,0,0,422,423,1,0,0, + 
0,423,424,1,0,0,0,424,428,5,76,0,0,425,428,3,54,27,0,426,428,3,8, + 4,0,427,422,1,0,0,0,427,425,1,0,0,0,427,426,1,0,0,0,428,461,1,0, + 0,0,429,434,5,63,0,0,430,431,5,25,0,0,431,432,3,6,3,0,432,433,5, + 26,0,0,433,435,1,0,0,0,434,430,1,0,0,0,434,435,1,0,0,0,435,436,1, + 0,0,0,436,437,5,21,0,0,437,438,3,6,3,0,438,439,5,22,0,0,439,461, + 1,0,0,0,440,441,5,64,0,0,441,442,5,21,0,0,442,443,3,6,3,0,443,444, + 5,22,0,0,444,461,1,0,0,0,445,452,7,8,0,0,446,447,3,78,39,0,447,448, + 3,76,38,0,448,453,1,0,0,0,449,450,3,76,38,0,450,451,3,78,39,0,451, + 453,1,0,0,0,452,446,1,0,0,0,452,449,1,0,0,0,453,454,1,0,0,0,454, + 455,3,10,5,0,455,461,1,0,0,0,456,457,5,32,0,0,457,458,3,68,34,0, + 458,459,3,10,5,0,459,461,1,0,0,0,460,371,1,0,0,0,460,393,1,0,0,0, + 460,412,1,0,0,0,460,429,1,0,0,0,460,440,1,0,0,0,460,445,1,0,0,0, + 460,456,1,0,0,0,461,65,1,0,0,0,462,463,3,6,3,0,463,464,5,1,0,0,464, + 465,3,66,33,0,465,468,1,0,0,0,466,468,3,6,3,0,467,462,1,0,0,0,467, + 466,1,0,0,0,468,67,1,0,0,0,469,470,5,73,0,0,470,471,5,21,0,0,471, + 472,7,3,0,0,472,473,5,33,0,0,473,482,3,6,3,0,474,480,5,74,0,0,475, + 476,5,21,0,0,476,477,7,1,0,0,477,481,5,22,0,0,478,481,5,15,0,0,479, + 481,5,16,0,0,480,475,1,0,0,0,480,478,1,0,0,0,480,479,1,0,0,0,481, + 483,1,0,0,0,482,474,1,0,0,0,482,483,1,0,0,0,483,484,1,0,0,0,484, + 485,5,22,0,0,485,69,1,0,0,0,486,492,3,6,3,0,487,488,3,6,3,0,488, + 489,5,1,0,0,489,490,3,70,35,0,490,492,1,0,0,0,491,486,1,0,0,0,491, + 487,1,0,0,0,492,71,1,0,0,0,493,494,3,12,6,0,494,73,1,0,0,0,495,501, + 5,73,0,0,496,502,3,44,22,0,497,498,5,21,0,0,498,499,3,6,3,0,499, + 500,5,22,0,0,500,502,1,0,0,0,501,496,1,0,0,0,501,497,1,0,0,0,502, + 75,1,0,0,0,503,509,5,74,0,0,504,510,3,44,22,0,505,506,5,21,0,0,506, + 507,3,6,3,0,507,508,5,22,0,0,508,510,1,0,0,0,509,504,1,0,0,0,509, + 505,1,0,0,0,510,77,1,0,0,0,511,512,5,73,0,0,512,513,5,21,0,0,513, + 514,3,4,2,0,514,515,5,22,0,0,515,79,1,0,0,0,516,517,5,73,0,0,517, + 518,5,21,0,0,518,519,3,4,2,0,519,520,5,22,0,0,520,81,1,0,0,0,59, + 
92,109,120,131,139,141,149,152,158,165,170,178,184,192,206,209,213, + 226,229,233,242,249,267,276,284,291,293,297,300,303,306,308,317, + 335,344,351,373,376,379,382,384,391,395,398,401,404,406,419,422, + 427,434,452,460,467,480,482,491,501,509 + ] + +class LaTeXParser ( Parser ): + + grammarFileName = "LaTeX.g4" + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + sharedContextCache = PredictionContextCache() + + literalNames = [ "", "','", "'.'", "", "", + "", "", "'\\quad'", "'\\qquad'", + "", "'\\negmedspace'", "'\\negthickspace'", + "'\\left'", "'\\right'", "", "'+'", "'-'", + "'*'", "'/'", "'('", "')'", "'{'", "'}'", "'\\{'", + "'\\}'", "'['", "']'", "'|'", "'\\right|'", "'\\left|'", + "'\\langle'", "'\\rangle'", "'\\lim'", "", + "", "'\\sum'", "'\\prod'", "'\\exp'", "'\\log'", + "'\\lg'", "'\\ln'", "'\\sin'", "'\\cos'", "'\\tan'", + "'\\csc'", "'\\sec'", "'\\cot'", "'\\arcsin'", "'\\arccos'", + "'\\arctan'", "'\\arccsc'", "'\\arcsec'", "'\\arccot'", + "'\\sinh'", "'\\cosh'", "'\\tanh'", "'\\arsinh'", "'\\arcosh'", + "'\\artanh'", "'\\lfloor'", "'\\rfloor'", "'\\lceil'", + "'\\rceil'", "'\\sqrt'", "'\\overline'", "'\\times'", + "'\\cdot'", "'\\div'", "", "'\\binom'", "'\\dbinom'", + "'\\tbinom'", "'\\mathit'", "'_'", "'^'", "':'", "", + "", "", "", "'\\neq'", "'<'", + "", "'\\leqq'", "'\\leqslant'", "'>'", "", + "'\\geqq'", "'\\geqslant'", "'!'" ] + + symbolicNames = [ "", "", "", "WS", "THINSPACE", + "MEDSPACE", "THICKSPACE", "QUAD", "QQUAD", "NEGTHINSPACE", + "NEGMEDSPACE", "NEGTHICKSPACE", "CMD_LEFT", "CMD_RIGHT", + "IGNORE", "ADD", "SUB", "MUL", "DIV", "L_PAREN", "R_PAREN", + "L_BRACE", "R_BRACE", "L_BRACE_LITERAL", "R_BRACE_LITERAL", + "L_BRACKET", "R_BRACKET", "BAR", "R_BAR", "L_BAR", + "L_ANGLE", "R_ANGLE", "FUNC_LIM", "LIM_APPROACH_SYM", + "FUNC_INT", "FUNC_SUM", "FUNC_PROD", "FUNC_EXP", "FUNC_LOG", + "FUNC_LG", "FUNC_LN", "FUNC_SIN", "FUNC_COS", "FUNC_TAN", + 
"FUNC_CSC", "FUNC_SEC", "FUNC_COT", "FUNC_ARCSIN", + "FUNC_ARCCOS", "FUNC_ARCTAN", "FUNC_ARCCSC", "FUNC_ARCSEC", + "FUNC_ARCCOT", "FUNC_SINH", "FUNC_COSH", "FUNC_TANH", + "FUNC_ARSINH", "FUNC_ARCOSH", "FUNC_ARTANH", "L_FLOOR", + "R_FLOOR", "L_CEIL", "R_CEIL", "FUNC_SQRT", "FUNC_OVERLINE", + "CMD_TIMES", "CMD_CDOT", "CMD_DIV", "CMD_FRAC", "CMD_BINOM", + "CMD_DBINOM", "CMD_TBINOM", "CMD_MATHIT", "UNDERSCORE", + "CARET", "COLON", "DIFFERENTIAL", "LETTER", "DIGIT", + "EQUAL", "NEQ", "LT", "LTE", "LTE_Q", "LTE_S", "GT", + "GTE", "GTE_Q", "GTE_S", "BANG", "SINGLE_QUOTES", + "SYMBOL" ] + + RULE_math = 0 + RULE_relation = 1 + RULE_equality = 2 + RULE_expr = 3 + RULE_additive = 4 + RULE_mp = 5 + RULE_mp_nofunc = 6 + RULE_unary = 7 + RULE_unary_nofunc = 8 + RULE_postfix = 9 + RULE_postfix_nofunc = 10 + RULE_postfix_op = 11 + RULE_eval_at = 12 + RULE_eval_at_sub = 13 + RULE_eval_at_sup = 14 + RULE_exp = 15 + RULE_exp_nofunc = 16 + RULE_comp = 17 + RULE_comp_nofunc = 18 + RULE_group = 19 + RULE_abs_group = 20 + RULE_number = 21 + RULE_atom = 22 + RULE_bra = 23 + RULE_ket = 24 + RULE_mathit = 25 + RULE_mathit_text = 26 + RULE_frac = 27 + RULE_binom = 28 + RULE_floor = 29 + RULE_ceil = 30 + RULE_func_normal = 31 + RULE_func = 32 + RULE_args = 33 + RULE_limit_sub = 34 + RULE_func_arg = 35 + RULE_func_arg_noparens = 36 + RULE_subexpr = 37 + RULE_supexpr = 38 + RULE_subeq = 39 + RULE_supeq = 40 + + ruleNames = [ "math", "relation", "equality", "expr", "additive", "mp", + "mp_nofunc", "unary", "unary_nofunc", "postfix", "postfix_nofunc", + "postfix_op", "eval_at", "eval_at_sub", "eval_at_sup", + "exp", "exp_nofunc", "comp", "comp_nofunc", "group", + "abs_group", "number", "atom", "bra", "ket", "mathit", + "mathit_text", "frac", "binom", "floor", "ceil", "func_normal", + "func", "args", "limit_sub", "func_arg", "func_arg_noparens", + "subexpr", "supexpr", "subeq", "supeq" ] + + EOF = Token.EOF + T__0=1 + T__1=2 + WS=3 + THINSPACE=4 + MEDSPACE=5 + THICKSPACE=6 + QUAD=7 + QQUAD=8 + 
NEGTHINSPACE=9 + NEGMEDSPACE=10 + NEGTHICKSPACE=11 + CMD_LEFT=12 + CMD_RIGHT=13 + IGNORE=14 + ADD=15 + SUB=16 + MUL=17 + DIV=18 + L_PAREN=19 + R_PAREN=20 + L_BRACE=21 + R_BRACE=22 + L_BRACE_LITERAL=23 + R_BRACE_LITERAL=24 + L_BRACKET=25 + R_BRACKET=26 + BAR=27 + R_BAR=28 + L_BAR=29 + L_ANGLE=30 + R_ANGLE=31 + FUNC_LIM=32 + LIM_APPROACH_SYM=33 + FUNC_INT=34 + FUNC_SUM=35 + FUNC_PROD=36 + FUNC_EXP=37 + FUNC_LOG=38 + FUNC_LG=39 + FUNC_LN=40 + FUNC_SIN=41 + FUNC_COS=42 + FUNC_TAN=43 + FUNC_CSC=44 + FUNC_SEC=45 + FUNC_COT=46 + FUNC_ARCSIN=47 + FUNC_ARCCOS=48 + FUNC_ARCTAN=49 + FUNC_ARCCSC=50 + FUNC_ARCSEC=51 + FUNC_ARCCOT=52 + FUNC_SINH=53 + FUNC_COSH=54 + FUNC_TANH=55 + FUNC_ARSINH=56 + FUNC_ARCOSH=57 + FUNC_ARTANH=58 + L_FLOOR=59 + R_FLOOR=60 + L_CEIL=61 + R_CEIL=62 + FUNC_SQRT=63 + FUNC_OVERLINE=64 + CMD_TIMES=65 + CMD_CDOT=66 + CMD_DIV=67 + CMD_FRAC=68 + CMD_BINOM=69 + CMD_DBINOM=70 + CMD_TBINOM=71 + CMD_MATHIT=72 + UNDERSCORE=73 + CARET=74 + COLON=75 + DIFFERENTIAL=76 + LETTER=77 + DIGIT=78 + EQUAL=79 + NEQ=80 + LT=81 + LTE=82 + LTE_Q=83 + LTE_S=84 + GT=85 + GTE=86 + GTE_Q=87 + GTE_S=88 + BANG=89 + SINGLE_QUOTES=90 + SYMBOL=91 + + def __init__(self, input:TokenStream, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.11.1") + self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) + self._predicates = None + + + + + class MathContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def relation(self): + return self.getTypedRuleContext(LaTeXParser.RelationContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_math + + + + + def math(self): + + localctx = LaTeXParser.MathContext(self, self._ctx, self.state) + self.enterRule(localctx, 0, self.RULE_math) + try: + self.enterOuterAlt(localctx, 1) + self.state = 82 + self.relation(0) + 
except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class RelationContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def relation(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.RelationContext) + else: + return self.getTypedRuleContext(LaTeXParser.RelationContext,i) + + + def EQUAL(self): + return self.getToken(LaTeXParser.EQUAL, 0) + + def LT(self): + return self.getToken(LaTeXParser.LT, 0) + + def LTE(self): + return self.getToken(LaTeXParser.LTE, 0) + + def GT(self): + return self.getToken(LaTeXParser.GT, 0) + + def GTE(self): + return self.getToken(LaTeXParser.GTE, 0) + + def NEQ(self): + return self.getToken(LaTeXParser.NEQ, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_relation + + + + def relation(self, _p:int=0): + _parentctx = self._ctx + _parentState = self.state + localctx = LaTeXParser.RelationContext(self, self._ctx, _parentState) + _prevctx = localctx + _startState = 2 + self.enterRecursionRule(localctx, 2, self.RULE_relation, _p) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 85 + self.expr() + self._ctx.stop = self._input.LT(-1) + self.state = 92 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,0,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + if self._parseListeners is not None: + self.triggerExitRuleEvent() + _prevctx = localctx + localctx = LaTeXParser.RelationContext(self, _parentctx, _parentState) + self.pushNewRecursionContext(localctx, _startState, self.RULE_relation) + self.state = 87 + if not self.precpred(self._ctx, 2): + from 
antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") + self.state = 88 + _la = self._input.LA(1) + if not((((_la - 79)) & ~0x3f) == 0 and ((1 << (_la - 79)) & 207) != 0): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 89 + self.relation(3) + self.state = 94 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,0,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.unrollRecursionContexts(_parentctx) + return localctx + + + class EqualityContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.ExprContext) + else: + return self.getTypedRuleContext(LaTeXParser.ExprContext,i) + + + def EQUAL(self): + return self.getToken(LaTeXParser.EQUAL, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_equality + + + + + def equality(self): + + localctx = LaTeXParser.EqualityContext(self, self._ctx, self.state) + self.enterRule(localctx, 4, self.RULE_equality) + try: + self.enterOuterAlt(localctx, 1) + self.state = 95 + self.expr() + self.state = 96 + self.match(LaTeXParser.EQUAL) + self.state = 97 + self.expr() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ExprContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def additive(self): + return 
self.getTypedRuleContext(LaTeXParser.AdditiveContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_expr + + + + + def expr(self): + + localctx = LaTeXParser.ExprContext(self, self._ctx, self.state) + self.enterRule(localctx, 6, self.RULE_expr) + try: + self.enterOuterAlt(localctx, 1) + self.state = 99 + self.additive(0) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class AdditiveContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def mp(self): + return self.getTypedRuleContext(LaTeXParser.MpContext,0) + + + def additive(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.AdditiveContext) + else: + return self.getTypedRuleContext(LaTeXParser.AdditiveContext,i) + + + def ADD(self): + return self.getToken(LaTeXParser.ADD, 0) + + def SUB(self): + return self.getToken(LaTeXParser.SUB, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_additive + + + + def additive(self, _p:int=0): + _parentctx = self._ctx + _parentState = self.state + localctx = LaTeXParser.AdditiveContext(self, self._ctx, _parentState) + _prevctx = localctx + _startState = 8 + self.enterRecursionRule(localctx, 8, self.RULE_additive, _p) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 102 + self.mp(0) + self._ctx.stop = self._input.LT(-1) + self.state = 109 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,1,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + if self._parseListeners is not None: + self.triggerExitRuleEvent() + _prevctx = localctx + localctx = LaTeXParser.AdditiveContext(self, _parentctx, _parentState) + self.pushNewRecursionContext(localctx, _startState, 
self.RULE_additive) + self.state = 104 + if not self.precpred(self._ctx, 2): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") + self.state = 105 + _la = self._input.LA(1) + if not(_la==15 or _la==16): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 106 + self.additive(3) + self.state = 111 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,1,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.unrollRecursionContexts(_parentctx) + return localctx + + + class MpContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def unary(self): + return self.getTypedRuleContext(LaTeXParser.UnaryContext,0) + + + def mp(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.MpContext) + else: + return self.getTypedRuleContext(LaTeXParser.MpContext,i) + + + def MUL(self): + return self.getToken(LaTeXParser.MUL, 0) + + def CMD_TIMES(self): + return self.getToken(LaTeXParser.CMD_TIMES, 0) + + def CMD_CDOT(self): + return self.getToken(LaTeXParser.CMD_CDOT, 0) + + def DIV(self): + return self.getToken(LaTeXParser.DIV, 0) + + def CMD_DIV(self): + return self.getToken(LaTeXParser.CMD_DIV, 0) + + def COLON(self): + return self.getToken(LaTeXParser.COLON, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_mp + + + + def mp(self, _p:int=0): + _parentctx = self._ctx + _parentState = self.state + localctx = LaTeXParser.MpContext(self, self._ctx, _parentState) + _prevctx = localctx + _startState = 10 + self.enterRecursionRule(localctx, 10, self.RULE_mp, _p) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 
1) + self.state = 113 + self.unary() + self._ctx.stop = self._input.LT(-1) + self.state = 120 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,2,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + if self._parseListeners is not None: + self.triggerExitRuleEvent() + _prevctx = localctx + localctx = LaTeXParser.MpContext(self, _parentctx, _parentState) + self.pushNewRecursionContext(localctx, _startState, self.RULE_mp) + self.state = 115 + if not self.precpred(self._ctx, 2): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") + self.state = 116 + _la = self._input.LA(1) + if not((((_la - 17)) & ~0x3f) == 0 and ((1 << (_la - 17)) & 290200700988686339) != 0): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 117 + self.mp(3) + self.state = 122 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,2,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.unrollRecursionContexts(_parentctx) + return localctx + + + class Mp_nofuncContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def unary_nofunc(self): + return self.getTypedRuleContext(LaTeXParser.Unary_nofuncContext,0) + + + def mp_nofunc(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.Mp_nofuncContext) + else: + return self.getTypedRuleContext(LaTeXParser.Mp_nofuncContext,i) + + + def MUL(self): + return self.getToken(LaTeXParser.MUL, 0) + + def CMD_TIMES(self): + return self.getToken(LaTeXParser.CMD_TIMES, 0) + + def CMD_CDOT(self): + return self.getToken(LaTeXParser.CMD_CDOT, 0) + + def DIV(self): 
+ return self.getToken(LaTeXParser.DIV, 0) + + def CMD_DIV(self): + return self.getToken(LaTeXParser.CMD_DIV, 0) + + def COLON(self): + return self.getToken(LaTeXParser.COLON, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_mp_nofunc + + + + def mp_nofunc(self, _p:int=0): + _parentctx = self._ctx + _parentState = self.state + localctx = LaTeXParser.Mp_nofuncContext(self, self._ctx, _parentState) + _prevctx = localctx + _startState = 12 + self.enterRecursionRule(localctx, 12, self.RULE_mp_nofunc, _p) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 124 + self.unary_nofunc() + self._ctx.stop = self._input.LT(-1) + self.state = 131 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,3,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + if self._parseListeners is not None: + self.triggerExitRuleEvent() + _prevctx = localctx + localctx = LaTeXParser.Mp_nofuncContext(self, _parentctx, _parentState) + self.pushNewRecursionContext(localctx, _startState, self.RULE_mp_nofunc) + self.state = 126 + if not self.precpred(self._ctx, 2): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") + self.state = 127 + _la = self._input.LA(1) + if not((((_la - 17)) & ~0x3f) == 0 and ((1 << (_la - 17)) & 290200700988686339) != 0): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 128 + self.mp_nofunc(3) + self.state = 133 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,3,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.unrollRecursionContexts(_parentctx) + return localctx + + + class UnaryContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, 
invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def unary(self): + return self.getTypedRuleContext(LaTeXParser.UnaryContext,0) + + + def ADD(self): + return self.getToken(LaTeXParser.ADD, 0) + + def SUB(self): + return self.getToken(LaTeXParser.SUB, 0) + + def postfix(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.PostfixContext) + else: + return self.getTypedRuleContext(LaTeXParser.PostfixContext,i) + + + def getRuleIndex(self): + return LaTeXParser.RULE_unary + + + + + def unary(self): + + localctx = LaTeXParser.UnaryContext(self, self._ctx, self.state) + self.enterRule(localctx, 14, self.RULE_unary) + self._la = 0 # Token type + try: + self.state = 141 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [15, 16]: + self.enterOuterAlt(localctx, 1) + self.state = 134 + _la = self._input.LA(1) + if not(_la==15 or _la==16): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 135 + self.unary() + pass + elif token in [19, 21, 23, 25, 27, 29, 30, 32, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 63, 64, 68, 69, 70, 71, 72, 76, 77, 78, 91]: + self.enterOuterAlt(localctx, 2) + self.state = 137 + self._errHandler.sync(self) + _alt = 1 + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt == 1: + self.state = 136 + self.postfix() + + else: + raise NoViableAltException(self) + self.state = 139 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,4,self._ctx) + + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Unary_nofuncContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, 
parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def unary_nofunc(self): + return self.getTypedRuleContext(LaTeXParser.Unary_nofuncContext,0) + + + def ADD(self): + return self.getToken(LaTeXParser.ADD, 0) + + def SUB(self): + return self.getToken(LaTeXParser.SUB, 0) + + def postfix(self): + return self.getTypedRuleContext(LaTeXParser.PostfixContext,0) + + + def postfix_nofunc(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.Postfix_nofuncContext) + else: + return self.getTypedRuleContext(LaTeXParser.Postfix_nofuncContext,i) + + + def getRuleIndex(self): + return LaTeXParser.RULE_unary_nofunc + + + + + def unary_nofunc(self): + + localctx = LaTeXParser.Unary_nofuncContext(self, self._ctx, self.state) + self.enterRule(localctx, 16, self.RULE_unary_nofunc) + self._la = 0 # Token type + try: + self.state = 152 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [15, 16]: + self.enterOuterAlt(localctx, 1) + self.state = 143 + _la = self._input.LA(1) + if not(_la==15 or _la==16): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 144 + self.unary_nofunc() + pass + elif token in [19, 21, 23, 25, 27, 29, 30, 32, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 63, 64, 68, 69, 70, 71, 72, 76, 77, 78, 91]: + self.enterOuterAlt(localctx, 2) + self.state = 145 + self.postfix() + self.state = 149 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,6,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 146 + self.postfix_nofunc() + self.state = 151 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,6,self._ctx) + + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + 
self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class PostfixContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def exp(self): + return self.getTypedRuleContext(LaTeXParser.ExpContext,0) + + + def postfix_op(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.Postfix_opContext) + else: + return self.getTypedRuleContext(LaTeXParser.Postfix_opContext,i) + + + def getRuleIndex(self): + return LaTeXParser.RULE_postfix + + + + + def postfix(self): + + localctx = LaTeXParser.PostfixContext(self, self._ctx, self.state) + self.enterRule(localctx, 18, self.RULE_postfix) + try: + self.enterOuterAlt(localctx, 1) + self.state = 154 + self.exp(0) + self.state = 158 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,8,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 155 + self.postfix_op() + self.state = 160 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,8,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Postfix_nofuncContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def exp_nofunc(self): + return self.getTypedRuleContext(LaTeXParser.Exp_nofuncContext,0) + + + def postfix_op(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.Postfix_opContext) + else: + return self.getTypedRuleContext(LaTeXParser.Postfix_opContext,i) + + + def getRuleIndex(self): + return 
LaTeXParser.RULE_postfix_nofunc + + + + + def postfix_nofunc(self): + + localctx = LaTeXParser.Postfix_nofuncContext(self, self._ctx, self.state) + self.enterRule(localctx, 20, self.RULE_postfix_nofunc) + try: + self.enterOuterAlt(localctx, 1) + self.state = 161 + self.exp_nofunc(0) + self.state = 165 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,9,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 162 + self.postfix_op() + self.state = 167 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,9,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Postfix_opContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def BANG(self): + return self.getToken(LaTeXParser.BANG, 0) + + def eval_at(self): + return self.getTypedRuleContext(LaTeXParser.Eval_atContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_postfix_op + + + + + def postfix_op(self): + + localctx = LaTeXParser.Postfix_opContext(self, self._ctx, self.state) + self.enterRule(localctx, 22, self.RULE_postfix_op) + try: + self.state = 170 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [89]: + self.enterOuterAlt(localctx, 1) + self.state = 168 + self.match(LaTeXParser.BANG) + pass + elif token in [27]: + self.enterOuterAlt(localctx, 2) + self.state = 169 + self.eval_at() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Eval_atContext(ParserRuleContext): + __slots__ = 'parser' + 
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def BAR(self): + return self.getToken(LaTeXParser.BAR, 0) + + def eval_at_sup(self): + return self.getTypedRuleContext(LaTeXParser.Eval_at_supContext,0) + + + def eval_at_sub(self): + return self.getTypedRuleContext(LaTeXParser.Eval_at_subContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_eval_at + + + + + def eval_at(self): + + localctx = LaTeXParser.Eval_atContext(self, self._ctx, self.state) + self.enterRule(localctx, 24, self.RULE_eval_at) + try: + self.enterOuterAlt(localctx, 1) + self.state = 172 + self.match(LaTeXParser.BAR) + self.state = 178 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,11,self._ctx) + if la_ == 1: + self.state = 173 + self.eval_at_sup() + pass + + elif la_ == 2: + self.state = 174 + self.eval_at_sub() + pass + + elif la_ == 3: + self.state = 175 + self.eval_at_sup() + self.state = 176 + self.eval_at_sub() + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Eval_at_subContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def UNDERSCORE(self): + return self.getToken(LaTeXParser.UNDERSCORE, 0) + + def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def equality(self): + return self.getTypedRuleContext(LaTeXParser.EqualityContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_eval_at_sub + + + + + def eval_at_sub(self): + + localctx = 
LaTeXParser.Eval_at_subContext(self, self._ctx, self.state) + self.enterRule(localctx, 26, self.RULE_eval_at_sub) + try: + self.enterOuterAlt(localctx, 1) + self.state = 180 + self.match(LaTeXParser.UNDERSCORE) + self.state = 181 + self.match(LaTeXParser.L_BRACE) + self.state = 184 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,12,self._ctx) + if la_ == 1: + self.state = 182 + self.expr() + pass + + elif la_ == 2: + self.state = 183 + self.equality() + pass + + + self.state = 186 + self.match(LaTeXParser.R_BRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Eval_at_supContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def CARET(self): + return self.getToken(LaTeXParser.CARET, 0) + + def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def equality(self): + return self.getTypedRuleContext(LaTeXParser.EqualityContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_eval_at_sup + + + + + def eval_at_sup(self): + + localctx = LaTeXParser.Eval_at_supContext(self, self._ctx, self.state) + self.enterRule(localctx, 28, self.RULE_eval_at_sup) + try: + self.enterOuterAlt(localctx, 1) + self.state = 188 + self.match(LaTeXParser.CARET) + self.state = 189 + self.match(LaTeXParser.L_BRACE) + self.state = 192 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,13,self._ctx) + if la_ == 1: + self.state = 190 + self.expr() + pass + + elif la_ == 2: + self.state = 191 + self.equality() + pass + + + self.state = 194 + 
self.match(LaTeXParser.R_BRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ExpContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def comp(self): + return self.getTypedRuleContext(LaTeXParser.CompContext,0) + + + def exp(self): + return self.getTypedRuleContext(LaTeXParser.ExpContext,0) + + + def CARET(self): + return self.getToken(LaTeXParser.CARET, 0) + + def atom(self): + return self.getTypedRuleContext(LaTeXParser.AtomContext,0) + + + def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def subexpr(self): + return self.getTypedRuleContext(LaTeXParser.SubexprContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_exp + + + + def exp(self, _p:int=0): + _parentctx = self._ctx + _parentState = self.state + localctx = LaTeXParser.ExpContext(self, self._ctx, _parentState) + _prevctx = localctx + _startState = 30 + self.enterRecursionRule(localctx, 30, self.RULE_exp, _p) + try: + self.enterOuterAlt(localctx, 1) + self.state = 197 + self.comp() + self._ctx.stop = self._input.LT(-1) + self.state = 213 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,16,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + if self._parseListeners is not None: + self.triggerExitRuleEvent() + _prevctx = localctx + localctx = LaTeXParser.ExpContext(self, _parentctx, _parentState) + self.pushNewRecursionContext(localctx, _startState, self.RULE_exp) + self.state = 199 + if not self.precpred(self._ctx, 2): + from antlr4.error.Errors import 
FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") + self.state = 200 + self.match(LaTeXParser.CARET) + self.state = 206 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [27, 29, 30, 68, 69, 70, 71, 72, 76, 77, 78, 91]: + self.state = 201 + self.atom() + pass + elif token in [21]: + self.state = 202 + self.match(LaTeXParser.L_BRACE) + self.state = 203 + self.expr() + self.state = 204 + self.match(LaTeXParser.R_BRACE) + pass + else: + raise NoViableAltException(self) + + self.state = 209 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,15,self._ctx) + if la_ == 1: + self.state = 208 + self.subexpr() + + + self.state = 215 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,16,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.unrollRecursionContexts(_parentctx) + return localctx + + + class Exp_nofuncContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def comp_nofunc(self): + return self.getTypedRuleContext(LaTeXParser.Comp_nofuncContext,0) + + + def exp_nofunc(self): + return self.getTypedRuleContext(LaTeXParser.Exp_nofuncContext,0) + + + def CARET(self): + return self.getToken(LaTeXParser.CARET, 0) + + def atom(self): + return self.getTypedRuleContext(LaTeXParser.AtomContext,0) + + + def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def subexpr(self): + return self.getTypedRuleContext(LaTeXParser.SubexprContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_exp_nofunc + + + + def 
exp_nofunc(self, _p:int=0): + _parentctx = self._ctx + _parentState = self.state + localctx = LaTeXParser.Exp_nofuncContext(self, self._ctx, _parentState) + _prevctx = localctx + _startState = 32 + self.enterRecursionRule(localctx, 32, self.RULE_exp_nofunc, _p) + try: + self.enterOuterAlt(localctx, 1) + self.state = 217 + self.comp_nofunc() + self._ctx.stop = self._input.LT(-1) + self.state = 233 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,19,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + if self._parseListeners is not None: + self.triggerExitRuleEvent() + _prevctx = localctx + localctx = LaTeXParser.Exp_nofuncContext(self, _parentctx, _parentState) + self.pushNewRecursionContext(localctx, _startState, self.RULE_exp_nofunc) + self.state = 219 + if not self.precpred(self._ctx, 2): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") + self.state = 220 + self.match(LaTeXParser.CARET) + self.state = 226 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [27, 29, 30, 68, 69, 70, 71, 72, 76, 77, 78, 91]: + self.state = 221 + self.atom() + pass + elif token in [21]: + self.state = 222 + self.match(LaTeXParser.L_BRACE) + self.state = 223 + self.expr() + self.state = 224 + self.match(LaTeXParser.R_BRACE) + pass + else: + raise NoViableAltException(self) + + self.state = 229 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,18,self._ctx) + if la_ == 1: + self.state = 228 + self.subexpr() + + + self.state = 235 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,19,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.unrollRecursionContexts(_parentctx) + return localctx + + + class CompContext(ParserRuleContext): + __slots__ = 'parser' + + 
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def group(self): + return self.getTypedRuleContext(LaTeXParser.GroupContext,0) + + + def abs_group(self): + return self.getTypedRuleContext(LaTeXParser.Abs_groupContext,0) + + + def func(self): + return self.getTypedRuleContext(LaTeXParser.FuncContext,0) + + + def atom(self): + return self.getTypedRuleContext(LaTeXParser.AtomContext,0) + + + def floor(self): + return self.getTypedRuleContext(LaTeXParser.FloorContext,0) + + + def ceil(self): + return self.getTypedRuleContext(LaTeXParser.CeilContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_comp + + + + + def comp(self): + + localctx = LaTeXParser.CompContext(self, self._ctx, self.state) + self.enterRule(localctx, 34, self.RULE_comp) + try: + self.state = 242 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,20,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 236 + self.group() + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 237 + self.abs_group() + pass + + elif la_ == 3: + self.enterOuterAlt(localctx, 3) + self.state = 238 + self.func() + pass + + elif la_ == 4: + self.enterOuterAlt(localctx, 4) + self.state = 239 + self.atom() + pass + + elif la_ == 5: + self.enterOuterAlt(localctx, 5) + self.state = 240 + self.floor() + pass + + elif la_ == 6: + self.enterOuterAlt(localctx, 6) + self.state = 241 + self.ceil() + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Comp_nofuncContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def group(self): + return 
self.getTypedRuleContext(LaTeXParser.GroupContext,0) + + + def abs_group(self): + return self.getTypedRuleContext(LaTeXParser.Abs_groupContext,0) + + + def atom(self): + return self.getTypedRuleContext(LaTeXParser.AtomContext,0) + + + def floor(self): + return self.getTypedRuleContext(LaTeXParser.FloorContext,0) + + + def ceil(self): + return self.getTypedRuleContext(LaTeXParser.CeilContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_comp_nofunc + + + + + def comp_nofunc(self): + + localctx = LaTeXParser.Comp_nofuncContext(self, self._ctx, self.state) + self.enterRule(localctx, 36, self.RULE_comp_nofunc) + try: + self.state = 249 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,21,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 244 + self.group() + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 245 + self.abs_group() + pass + + elif la_ == 3: + self.enterOuterAlt(localctx, 3) + self.state = 246 + self.atom() + pass + + elif la_ == 4: + self.enterOuterAlt(localctx, 4) + self.state = 247 + self.floor() + pass + + elif la_ == 5: + self.enterOuterAlt(localctx, 5) + self.state = 248 + self.ceil() + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class GroupContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def L_PAREN(self): + return self.getToken(LaTeXParser.L_PAREN, 0) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def R_PAREN(self): + return self.getToken(LaTeXParser.R_PAREN, 0) + + def L_BRACKET(self): + return self.getToken(LaTeXParser.L_BRACKET, 0) + + def R_BRACKET(self): + return self.getToken(LaTeXParser.R_BRACKET, 0) + + 
def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def L_BRACE_LITERAL(self): + return self.getToken(LaTeXParser.L_BRACE_LITERAL, 0) + + def R_BRACE_LITERAL(self): + return self.getToken(LaTeXParser.R_BRACE_LITERAL, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_group + + + + + def group(self): + + localctx = LaTeXParser.GroupContext(self, self._ctx, self.state) + self.enterRule(localctx, 38, self.RULE_group) + try: + self.state = 267 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [19]: + self.enterOuterAlt(localctx, 1) + self.state = 251 + self.match(LaTeXParser.L_PAREN) + self.state = 252 + self.expr() + self.state = 253 + self.match(LaTeXParser.R_PAREN) + pass + elif token in [25]: + self.enterOuterAlt(localctx, 2) + self.state = 255 + self.match(LaTeXParser.L_BRACKET) + self.state = 256 + self.expr() + self.state = 257 + self.match(LaTeXParser.R_BRACKET) + pass + elif token in [21]: + self.enterOuterAlt(localctx, 3) + self.state = 259 + self.match(LaTeXParser.L_BRACE) + self.state = 260 + self.expr() + self.state = 261 + self.match(LaTeXParser.R_BRACE) + pass + elif token in [23]: + self.enterOuterAlt(localctx, 4) + self.state = 263 + self.match(LaTeXParser.L_BRACE_LITERAL) + self.state = 264 + self.expr() + self.state = 265 + self.match(LaTeXParser.R_BRACE_LITERAL) + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Abs_groupContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def BAR(self, i:int=None): + if i is None: + return self.getTokens(LaTeXParser.BAR) + else: + return 
self.getToken(LaTeXParser.BAR, i) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_abs_group + + + + + def abs_group(self): + + localctx = LaTeXParser.Abs_groupContext(self, self._ctx, self.state) + self.enterRule(localctx, 40, self.RULE_abs_group) + try: + self.enterOuterAlt(localctx, 1) + self.state = 269 + self.match(LaTeXParser.BAR) + self.state = 270 + self.expr() + self.state = 271 + self.match(LaTeXParser.BAR) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class NumberContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def DIGIT(self, i:int=None): + if i is None: + return self.getTokens(LaTeXParser.DIGIT) + else: + return self.getToken(LaTeXParser.DIGIT, i) + + def getRuleIndex(self): + return LaTeXParser.RULE_number + + + + + def number(self): + + localctx = LaTeXParser.NumberContext(self, self._ctx, self.state) + self.enterRule(localctx, 42, self.RULE_number) + try: + self.enterOuterAlt(localctx, 1) + self.state = 274 + self._errHandler.sync(self) + _alt = 1 + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt == 1: + self.state = 273 + self.match(LaTeXParser.DIGIT) + + else: + raise NoViableAltException(self) + self.state = 276 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,23,self._ctx) + + self.state = 284 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,24,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 278 + self.match(LaTeXParser.T__0) + self.state = 279 + self.match(LaTeXParser.DIGIT) + self.state = 280 + self.match(LaTeXParser.DIGIT) + self.state = 281 + 
self.match(LaTeXParser.DIGIT) + self.state = 286 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,24,self._ctx) + + self.state = 293 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,26,self._ctx) + if la_ == 1: + self.state = 287 + self.match(LaTeXParser.T__1) + self.state = 289 + self._errHandler.sync(self) + _alt = 1 + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt == 1: + self.state = 288 + self.match(LaTeXParser.DIGIT) + + else: + raise NoViableAltException(self) + self.state = 291 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,25,self._ctx) + + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class AtomContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def LETTER(self): + return self.getToken(LaTeXParser.LETTER, 0) + + def SYMBOL(self): + return self.getToken(LaTeXParser.SYMBOL, 0) + + def subexpr(self): + return self.getTypedRuleContext(LaTeXParser.SubexprContext,0) + + + def SINGLE_QUOTES(self): + return self.getToken(LaTeXParser.SINGLE_QUOTES, 0) + + def number(self): + return self.getTypedRuleContext(LaTeXParser.NumberContext,0) + + + def DIFFERENTIAL(self): + return self.getToken(LaTeXParser.DIFFERENTIAL, 0) + + def mathit(self): + return self.getTypedRuleContext(LaTeXParser.MathitContext,0) + + + def frac(self): + return self.getTypedRuleContext(LaTeXParser.FracContext,0) + + + def binom(self): + return self.getTypedRuleContext(LaTeXParser.BinomContext,0) + + + def bra(self): + return self.getTypedRuleContext(LaTeXParser.BraContext,0) + + + def ket(self): + return self.getTypedRuleContext(LaTeXParser.KetContext,0) + + + def getRuleIndex(self): + return 
LaTeXParser.RULE_atom + + + + + def atom(self): + + localctx = LaTeXParser.AtomContext(self, self._ctx, self.state) + self.enterRule(localctx, 44, self.RULE_atom) + self._la = 0 # Token type + try: + self.state = 317 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [77, 91]: + self.enterOuterAlt(localctx, 1) + self.state = 295 + _la = self._input.LA(1) + if not(_la==77 or _la==91): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 308 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,31,self._ctx) + if la_ == 1: + self.state = 297 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,27,self._ctx) + if la_ == 1: + self.state = 296 + self.subexpr() + + + self.state = 300 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,28,self._ctx) + if la_ == 1: + self.state = 299 + self.match(LaTeXParser.SINGLE_QUOTES) + + + pass + + elif la_ == 2: + self.state = 303 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,29,self._ctx) + if la_ == 1: + self.state = 302 + self.match(LaTeXParser.SINGLE_QUOTES) + + + self.state = 306 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,30,self._ctx) + if la_ == 1: + self.state = 305 + self.subexpr() + + + pass + + + pass + elif token in [78]: + self.enterOuterAlt(localctx, 2) + self.state = 310 + self.number() + pass + elif token in [76]: + self.enterOuterAlt(localctx, 3) + self.state = 311 + self.match(LaTeXParser.DIFFERENTIAL) + pass + elif token in [72]: + self.enterOuterAlt(localctx, 4) + self.state = 312 + self.mathit() + pass + elif token in [68]: + self.enterOuterAlt(localctx, 5) + self.state = 313 + self.frac() + pass + elif token in [69, 70, 71]: + self.enterOuterAlt(localctx, 6) + self.state = 314 + self.binom() + pass + elif token in [30]: + self.enterOuterAlt(localctx, 7) + self.state = 315 + self.bra() 
+ pass + elif token in [27, 29]: + self.enterOuterAlt(localctx, 8) + self.state = 316 + self.ket() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class BraContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def L_ANGLE(self): + return self.getToken(LaTeXParser.L_ANGLE, 0) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def R_BAR(self): + return self.getToken(LaTeXParser.R_BAR, 0) + + def BAR(self): + return self.getToken(LaTeXParser.BAR, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_bra + + + + + def bra(self): + + localctx = LaTeXParser.BraContext(self, self._ctx, self.state) + self.enterRule(localctx, 46, self.RULE_bra) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 319 + self.match(LaTeXParser.L_ANGLE) + self.state = 320 + self.expr() + self.state = 321 + _la = self._input.LA(1) + if not(_la==27 or _la==28): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class KetContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def R_ANGLE(self): + return self.getToken(LaTeXParser.R_ANGLE, 0) + + def L_BAR(self): + return self.getToken(LaTeXParser.L_BAR, 0) + + def 
BAR(self): + return self.getToken(LaTeXParser.BAR, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_ket + + + + + def ket(self): + + localctx = LaTeXParser.KetContext(self, self._ctx, self.state) + self.enterRule(localctx, 48, self.RULE_ket) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 323 + _la = self._input.LA(1) + if not(_la==27 or _la==29): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 324 + self.expr() + self.state = 325 + self.match(LaTeXParser.R_ANGLE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class MathitContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def CMD_MATHIT(self): + return self.getToken(LaTeXParser.CMD_MATHIT, 0) + + def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def mathit_text(self): + return self.getTypedRuleContext(LaTeXParser.Mathit_textContext,0) + + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_mathit + + + + + def mathit(self): + + localctx = LaTeXParser.MathitContext(self, self._ctx, self.state) + self.enterRule(localctx, 50, self.RULE_mathit) + try: + self.enterOuterAlt(localctx, 1) + self.state = 327 + self.match(LaTeXParser.CMD_MATHIT) + self.state = 328 + self.match(LaTeXParser.L_BRACE) + self.state = 329 + self.mathit_text() + self.state = 330 + self.match(LaTeXParser.R_BRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Mathit_textContext(ParserRuleContext): + 
__slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def LETTER(self, i:int=None): + if i is None: + return self.getTokens(LaTeXParser.LETTER) + else: + return self.getToken(LaTeXParser.LETTER, i) + + def getRuleIndex(self): + return LaTeXParser.RULE_mathit_text + + + + + def mathit_text(self): + + localctx = LaTeXParser.Mathit_textContext(self, self._ctx, self.state) + self.enterRule(localctx, 52, self.RULE_mathit_text) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 335 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==77: + self.state = 332 + self.match(LaTeXParser.LETTER) + self.state = 337 + self._errHandler.sync(self) + _la = self._input.LA(1) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class FracContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.upperd = None # Token + self.upper = None # ExprContext + self.lowerd = None # Token + self.lower = None # ExprContext + + def CMD_FRAC(self): + return self.getToken(LaTeXParser.CMD_FRAC, 0) + + def L_BRACE(self, i:int=None): + if i is None: + return self.getTokens(LaTeXParser.L_BRACE) + else: + return self.getToken(LaTeXParser.L_BRACE, i) + + def R_BRACE(self, i:int=None): + if i is None: + return self.getTokens(LaTeXParser.R_BRACE) + else: + return self.getToken(LaTeXParser.R_BRACE, i) + + def DIGIT(self, i:int=None): + if i is None: + return self.getTokens(LaTeXParser.DIGIT) + else: + return self.getToken(LaTeXParser.DIGIT, i) + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.ExprContext) + 
else: + return self.getTypedRuleContext(LaTeXParser.ExprContext,i) + + + def getRuleIndex(self): + return LaTeXParser.RULE_frac + + + + + def frac(self): + + localctx = LaTeXParser.FracContext(self, self._ctx, self.state) + self.enterRule(localctx, 54, self.RULE_frac) + try: + self.enterOuterAlt(localctx, 1) + self.state = 338 + self.match(LaTeXParser.CMD_FRAC) + self.state = 344 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [78]: + self.state = 339 + localctx.upperd = self.match(LaTeXParser.DIGIT) + pass + elif token in [21]: + self.state = 340 + self.match(LaTeXParser.L_BRACE) + self.state = 341 + localctx.upper = self.expr() + self.state = 342 + self.match(LaTeXParser.R_BRACE) + pass + else: + raise NoViableAltException(self) + + self.state = 351 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [78]: + self.state = 346 + localctx.lowerd = self.match(LaTeXParser.DIGIT) + pass + elif token in [21]: + self.state = 347 + self.match(LaTeXParser.L_BRACE) + self.state = 348 + localctx.lower = self.expr() + self.state = 349 + self.match(LaTeXParser.R_BRACE) + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class BinomContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.n = None # ExprContext + self.k = None # ExprContext + + def L_BRACE(self, i:int=None): + if i is None: + return self.getTokens(LaTeXParser.L_BRACE) + else: + return self.getToken(LaTeXParser.L_BRACE, i) + + def R_BRACE(self, i:int=None): + if i is None: + return self.getTokens(LaTeXParser.R_BRACE) + else: + return self.getToken(LaTeXParser.R_BRACE, i) + + def CMD_BINOM(self): + return 
self.getToken(LaTeXParser.CMD_BINOM, 0) + + def CMD_DBINOM(self): + return self.getToken(LaTeXParser.CMD_DBINOM, 0) + + def CMD_TBINOM(self): + return self.getToken(LaTeXParser.CMD_TBINOM, 0) + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.ExprContext) + else: + return self.getTypedRuleContext(LaTeXParser.ExprContext,i) + + + def getRuleIndex(self): + return LaTeXParser.RULE_binom + + + + + def binom(self): + + localctx = LaTeXParser.BinomContext(self, self._ctx, self.state) + self.enterRule(localctx, 56, self.RULE_binom) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 353 + _la = self._input.LA(1) + if not((((_la - 69)) & ~0x3f) == 0 and ((1 << (_la - 69)) & 7) != 0): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 354 + self.match(LaTeXParser.L_BRACE) + self.state = 355 + localctx.n = self.expr() + self.state = 356 + self.match(LaTeXParser.R_BRACE) + self.state = 357 + self.match(LaTeXParser.L_BRACE) + self.state = 358 + localctx.k = self.expr() + self.state = 359 + self.match(LaTeXParser.R_BRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class FloorContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.val = None # ExprContext + + def L_FLOOR(self): + return self.getToken(LaTeXParser.L_FLOOR, 0) + + def R_FLOOR(self): + return self.getToken(LaTeXParser.R_FLOOR, 0) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_floor + + + + + def floor(self): + + localctx = LaTeXParser.FloorContext(self, self._ctx, self.state) + 
self.enterRule(localctx, 58, self.RULE_floor) + try: + self.enterOuterAlt(localctx, 1) + self.state = 361 + self.match(LaTeXParser.L_FLOOR) + self.state = 362 + localctx.val = self.expr() + self.state = 363 + self.match(LaTeXParser.R_FLOOR) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class CeilContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.val = None # ExprContext + + def L_CEIL(self): + return self.getToken(LaTeXParser.L_CEIL, 0) + + def R_CEIL(self): + return self.getToken(LaTeXParser.R_CEIL, 0) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_ceil + + + + + def ceil(self): + + localctx = LaTeXParser.CeilContext(self, self._ctx, self.state) + self.enterRule(localctx, 60, self.RULE_ceil) + try: + self.enterOuterAlt(localctx, 1) + self.state = 365 + self.match(LaTeXParser.L_CEIL) + self.state = 366 + localctx.val = self.expr() + self.state = 367 + self.match(LaTeXParser.R_CEIL) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Func_normalContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def FUNC_EXP(self): + return self.getToken(LaTeXParser.FUNC_EXP, 0) + + def FUNC_LOG(self): + return self.getToken(LaTeXParser.FUNC_LOG, 0) + + def FUNC_LG(self): + return self.getToken(LaTeXParser.FUNC_LG, 0) + + def FUNC_LN(self): + return self.getToken(LaTeXParser.FUNC_LN, 0) + + def 
FUNC_SIN(self): + return self.getToken(LaTeXParser.FUNC_SIN, 0) + + def FUNC_COS(self): + return self.getToken(LaTeXParser.FUNC_COS, 0) + + def FUNC_TAN(self): + return self.getToken(LaTeXParser.FUNC_TAN, 0) + + def FUNC_CSC(self): + return self.getToken(LaTeXParser.FUNC_CSC, 0) + + def FUNC_SEC(self): + return self.getToken(LaTeXParser.FUNC_SEC, 0) + + def FUNC_COT(self): + return self.getToken(LaTeXParser.FUNC_COT, 0) + + def FUNC_ARCSIN(self): + return self.getToken(LaTeXParser.FUNC_ARCSIN, 0) + + def FUNC_ARCCOS(self): + return self.getToken(LaTeXParser.FUNC_ARCCOS, 0) + + def FUNC_ARCTAN(self): + return self.getToken(LaTeXParser.FUNC_ARCTAN, 0) + + def FUNC_ARCCSC(self): + return self.getToken(LaTeXParser.FUNC_ARCCSC, 0) + + def FUNC_ARCSEC(self): + return self.getToken(LaTeXParser.FUNC_ARCSEC, 0) + + def FUNC_ARCCOT(self): + return self.getToken(LaTeXParser.FUNC_ARCCOT, 0) + + def FUNC_SINH(self): + return self.getToken(LaTeXParser.FUNC_SINH, 0) + + def FUNC_COSH(self): + return self.getToken(LaTeXParser.FUNC_COSH, 0) + + def FUNC_TANH(self): + return self.getToken(LaTeXParser.FUNC_TANH, 0) + + def FUNC_ARSINH(self): + return self.getToken(LaTeXParser.FUNC_ARSINH, 0) + + def FUNC_ARCOSH(self): + return self.getToken(LaTeXParser.FUNC_ARCOSH, 0) + + def FUNC_ARTANH(self): + return self.getToken(LaTeXParser.FUNC_ARTANH, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_func_normal + + + + + def func_normal(self): + + localctx = LaTeXParser.Func_normalContext(self, self._ctx, self.state) + self.enterRule(localctx, 62, self.RULE_func_normal) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 369 + _la = self._input.LA(1) + if not(((_la) & ~0x3f) == 0 and ((1 << _la) & 576460614864470016) != 0): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + 
self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class FuncContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.root = None # ExprContext + self.base = None # ExprContext + + def func_normal(self): + return self.getTypedRuleContext(LaTeXParser.Func_normalContext,0) + + + def L_PAREN(self): + return self.getToken(LaTeXParser.L_PAREN, 0) + + def func_arg(self): + return self.getTypedRuleContext(LaTeXParser.Func_argContext,0) + + + def R_PAREN(self): + return self.getToken(LaTeXParser.R_PAREN, 0) + + def func_arg_noparens(self): + return self.getTypedRuleContext(LaTeXParser.Func_arg_noparensContext,0) + + + def subexpr(self): + return self.getTypedRuleContext(LaTeXParser.SubexprContext,0) + + + def supexpr(self): + return self.getTypedRuleContext(LaTeXParser.SupexprContext,0) + + + def args(self): + return self.getTypedRuleContext(LaTeXParser.ArgsContext,0) + + + def LETTER(self): + return self.getToken(LaTeXParser.LETTER, 0) + + def SYMBOL(self): + return self.getToken(LaTeXParser.SYMBOL, 0) + + def SINGLE_QUOTES(self): + return self.getToken(LaTeXParser.SINGLE_QUOTES, 0) + + def FUNC_INT(self): + return self.getToken(LaTeXParser.FUNC_INT, 0) + + def DIFFERENTIAL(self): + return self.getToken(LaTeXParser.DIFFERENTIAL, 0) + + def frac(self): + return self.getTypedRuleContext(LaTeXParser.FracContext,0) + + + def additive(self): + return self.getTypedRuleContext(LaTeXParser.AdditiveContext,0) + + + def FUNC_SQRT(self): + return self.getToken(LaTeXParser.FUNC_SQRT, 0) + + def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(LaTeXParser.ExprContext) + else: + return self.getTypedRuleContext(LaTeXParser.ExprContext,i) + 
+ + def L_BRACKET(self): + return self.getToken(LaTeXParser.L_BRACKET, 0) + + def R_BRACKET(self): + return self.getToken(LaTeXParser.R_BRACKET, 0) + + def FUNC_OVERLINE(self): + return self.getToken(LaTeXParser.FUNC_OVERLINE, 0) + + def mp(self): + return self.getTypedRuleContext(LaTeXParser.MpContext,0) + + + def FUNC_SUM(self): + return self.getToken(LaTeXParser.FUNC_SUM, 0) + + def FUNC_PROD(self): + return self.getToken(LaTeXParser.FUNC_PROD, 0) + + def subeq(self): + return self.getTypedRuleContext(LaTeXParser.SubeqContext,0) + + + def FUNC_LIM(self): + return self.getToken(LaTeXParser.FUNC_LIM, 0) + + def limit_sub(self): + return self.getTypedRuleContext(LaTeXParser.Limit_subContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_func + + + + + def func(self): + + localctx = LaTeXParser.FuncContext(self, self._ctx, self.state) + self.enterRule(localctx, 64, self.RULE_func) + self._la = 0 # Token type + try: + self.state = 460 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58]: + self.enterOuterAlt(localctx, 1) + self.state = 371 + self.func_normal() + self.state = 384 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,40,self._ctx) + if la_ == 1: + self.state = 373 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==73: + self.state = 372 + self.subexpr() + + + self.state = 376 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==74: + self.state = 375 + self.supexpr() + + + pass + + elif la_ == 2: + self.state = 379 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==74: + self.state = 378 + self.supexpr() + + + self.state = 382 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==73: + self.state = 381 + self.subexpr() + + + pass + + + self.state = 391 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,41,self._ctx) + if la_ == 
1: + self.state = 386 + self.match(LaTeXParser.L_PAREN) + self.state = 387 + self.func_arg() + self.state = 388 + self.match(LaTeXParser.R_PAREN) + pass + + elif la_ == 2: + self.state = 390 + self.func_arg_noparens() + pass + + + pass + elif token in [77, 91]: + self.enterOuterAlt(localctx, 2) + self.state = 393 + _la = self._input.LA(1) + if not(_la==77 or _la==91): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 406 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,46,self._ctx) + if la_ == 1: + self.state = 395 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==73: + self.state = 394 + self.subexpr() + + + self.state = 398 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==90: + self.state = 397 + self.match(LaTeXParser.SINGLE_QUOTES) + + + pass + + elif la_ == 2: + self.state = 401 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==90: + self.state = 400 + self.match(LaTeXParser.SINGLE_QUOTES) + + + self.state = 404 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==73: + self.state = 403 + self.subexpr() + + + pass + + + self.state = 408 + self.match(LaTeXParser.L_PAREN) + self.state = 409 + self.args() + self.state = 410 + self.match(LaTeXParser.R_PAREN) + pass + elif token in [34]: + self.enterOuterAlt(localctx, 3) + self.state = 412 + self.match(LaTeXParser.FUNC_INT) + self.state = 419 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [73]: + self.state = 413 + self.subexpr() + self.state = 414 + self.supexpr() + pass + elif token in [74]: + self.state = 416 + self.supexpr() + self.state = 417 + self.subexpr() + pass + elif token in [15, 16, 19, 21, 23, 25, 27, 29, 30, 32, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 61, 63, 64, 68, 69, 70, 71, 72, 76, 77, 78, 91]: + pass + else: + pass + self.state = 427 + 
self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,49,self._ctx) + if la_ == 1: + self.state = 422 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,48,self._ctx) + if la_ == 1: + self.state = 421 + self.additive(0) + + + self.state = 424 + self.match(LaTeXParser.DIFFERENTIAL) + pass + + elif la_ == 2: + self.state = 425 + self.frac() + pass + + elif la_ == 3: + self.state = 426 + self.additive(0) + pass + + + pass + elif token in [63]: + self.enterOuterAlt(localctx, 4) + self.state = 429 + self.match(LaTeXParser.FUNC_SQRT) + self.state = 434 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==25: + self.state = 430 + self.match(LaTeXParser.L_BRACKET) + self.state = 431 + localctx.root = self.expr() + self.state = 432 + self.match(LaTeXParser.R_BRACKET) + + + self.state = 436 + self.match(LaTeXParser.L_BRACE) + self.state = 437 + localctx.base = self.expr() + self.state = 438 + self.match(LaTeXParser.R_BRACE) + pass + elif token in [64]: + self.enterOuterAlt(localctx, 5) + self.state = 440 + self.match(LaTeXParser.FUNC_OVERLINE) + self.state = 441 + self.match(LaTeXParser.L_BRACE) + self.state = 442 + localctx.base = self.expr() + self.state = 443 + self.match(LaTeXParser.R_BRACE) + pass + elif token in [35, 36]: + self.enterOuterAlt(localctx, 6) + self.state = 445 + _la = self._input.LA(1) + if not(_la==35 or _la==36): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 452 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [73]: + self.state = 446 + self.subeq() + self.state = 447 + self.supexpr() + pass + elif token in [74]: + self.state = 449 + self.supexpr() + self.state = 450 + self.subeq() + pass + else: + raise NoViableAltException(self) + + self.state = 454 + self.mp(0) + pass + elif token in [32]: + self.enterOuterAlt(localctx, 7) + self.state = 456 + self.match(LaTeXParser.FUNC_LIM) + self.state = 457 + 
self.limit_sub() + self.state = 458 + self.mp(0) + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ArgsContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def args(self): + return self.getTypedRuleContext(LaTeXParser.ArgsContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_args + + + + + def args(self): + + localctx = LaTeXParser.ArgsContext(self, self._ctx, self.state) + self.enterRule(localctx, 66, self.RULE_args) + try: + self.state = 467 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,53,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 462 + self.expr() + self.state = 463 + self.match(LaTeXParser.T__0) + self.state = 464 + self.args() + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 466 + self.expr() + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Limit_subContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def UNDERSCORE(self): + return self.getToken(LaTeXParser.UNDERSCORE, 0) + + def L_BRACE(self, i:int=None): + if i is None: + return self.getTokens(LaTeXParser.L_BRACE) + else: + return self.getToken(LaTeXParser.L_BRACE, i) + + def LIM_APPROACH_SYM(self): + return self.getToken(LaTeXParser.LIM_APPROACH_SYM, 0) + + def 
expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def R_BRACE(self, i:int=None): + if i is None: + return self.getTokens(LaTeXParser.R_BRACE) + else: + return self.getToken(LaTeXParser.R_BRACE, i) + + def LETTER(self): + return self.getToken(LaTeXParser.LETTER, 0) + + def SYMBOL(self): + return self.getToken(LaTeXParser.SYMBOL, 0) + + def CARET(self): + return self.getToken(LaTeXParser.CARET, 0) + + def ADD(self): + return self.getToken(LaTeXParser.ADD, 0) + + def SUB(self): + return self.getToken(LaTeXParser.SUB, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_limit_sub + + + + + def limit_sub(self): + + localctx = LaTeXParser.Limit_subContext(self, self._ctx, self.state) + self.enterRule(localctx, 68, self.RULE_limit_sub) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 469 + self.match(LaTeXParser.UNDERSCORE) + self.state = 470 + self.match(LaTeXParser.L_BRACE) + self.state = 471 + _la = self._input.LA(1) + if not(_la==77 or _la==91): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 472 + self.match(LaTeXParser.LIM_APPROACH_SYM) + self.state = 473 + self.expr() + self.state = 482 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==74: + self.state = 474 + self.match(LaTeXParser.CARET) + self.state = 480 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [21]: + self.state = 475 + self.match(LaTeXParser.L_BRACE) + self.state = 476 + _la = self._input.LA(1) + if not(_la==15 or _la==16): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 477 + self.match(LaTeXParser.R_BRACE) + pass + elif token in [15]: + self.state = 478 + self.match(LaTeXParser.ADD) + pass + elif token in [16]: + self.state = 479 + self.match(LaTeXParser.SUB) + pass + else: + raise NoViableAltException(self) + + + + self.state = 484 + 
self.match(LaTeXParser.R_BRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Func_argContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def func_arg(self): + return self.getTypedRuleContext(LaTeXParser.Func_argContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_func_arg + + + + + def func_arg(self): + + localctx = LaTeXParser.Func_argContext(self, self._ctx, self.state) + self.enterRule(localctx, 70, self.RULE_func_arg) + try: + self.state = 491 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,56,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 486 + self.expr() + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 487 + self.expr() + self.state = 488 + self.match(LaTeXParser.T__0) + self.state = 489 + self.func_arg() + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class Func_arg_noparensContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def mp_nofunc(self): + return self.getTypedRuleContext(LaTeXParser.Mp_nofuncContext,0) + + + def getRuleIndex(self): + return LaTeXParser.RULE_func_arg_noparens + + + + + def func_arg_noparens(self): + + localctx = LaTeXParser.Func_arg_noparensContext(self, self._ctx, self.state) + self.enterRule(localctx, 72, self.RULE_func_arg_noparens) + try: + 
self.enterOuterAlt(localctx, 1) + self.state = 493 + self.mp_nofunc(0) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SubexprContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def UNDERSCORE(self): + return self.getToken(LaTeXParser.UNDERSCORE, 0) + + def atom(self): + return self.getTypedRuleContext(LaTeXParser.AtomContext,0) + + + def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_subexpr + + + + + def subexpr(self): + + localctx = LaTeXParser.SubexprContext(self, self._ctx, self.state) + self.enterRule(localctx, 74, self.RULE_subexpr) + try: + self.enterOuterAlt(localctx, 1) + self.state = 495 + self.match(LaTeXParser.UNDERSCORE) + self.state = 501 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [27, 29, 30, 68, 69, 70, 71, 72, 76, 77, 78, 91]: + self.state = 496 + self.atom() + pass + elif token in [21]: + self.state = 497 + self.match(LaTeXParser.L_BRACE) + self.state = 498 + self.expr() + self.state = 499 + self.match(LaTeXParser.R_BRACE) + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SupexprContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def CARET(self): + 
return self.getToken(LaTeXParser.CARET, 0) + + def atom(self): + return self.getTypedRuleContext(LaTeXParser.AtomContext,0) + + + def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def expr(self): + return self.getTypedRuleContext(LaTeXParser.ExprContext,0) + + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_supexpr + + + + + def supexpr(self): + + localctx = LaTeXParser.SupexprContext(self, self._ctx, self.state) + self.enterRule(localctx, 76, self.RULE_supexpr) + try: + self.enterOuterAlt(localctx, 1) + self.state = 503 + self.match(LaTeXParser.CARET) + self.state = 509 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [27, 29, 30, 68, 69, 70, 71, 72, 76, 77, 78, 91]: + self.state = 504 + self.atom() + pass + elif token in [21]: + self.state = 505 + self.match(LaTeXParser.L_BRACE) + self.state = 506 + self.expr() + self.state = 507 + self.match(LaTeXParser.R_BRACE) + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SubeqContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def UNDERSCORE(self): + return self.getToken(LaTeXParser.UNDERSCORE, 0) + + def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def equality(self): + return self.getTypedRuleContext(LaTeXParser.EqualityContext,0) + + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_subeq + + + + + def subeq(self): + + localctx = LaTeXParser.SubeqContext(self, self._ctx, self.state) + self.enterRule(localctx, 78, self.RULE_subeq) + try: + 
self.enterOuterAlt(localctx, 1) + self.state = 511 + self.match(LaTeXParser.UNDERSCORE) + self.state = 512 + self.match(LaTeXParser.L_BRACE) + self.state = 513 + self.equality() + self.state = 514 + self.match(LaTeXParser.R_BRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SupeqContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def UNDERSCORE(self): + return self.getToken(LaTeXParser.UNDERSCORE, 0) + + def L_BRACE(self): + return self.getToken(LaTeXParser.L_BRACE, 0) + + def equality(self): + return self.getTypedRuleContext(LaTeXParser.EqualityContext,0) + + + def R_BRACE(self): + return self.getToken(LaTeXParser.R_BRACE, 0) + + def getRuleIndex(self): + return LaTeXParser.RULE_supeq + + + + + def supeq(self): + + localctx = LaTeXParser.SupeqContext(self, self._ctx, self.state) + self.enterRule(localctx, 80, self.RULE_supeq) + try: + self.enterOuterAlt(localctx, 1) + self.state = 516 + self.match(LaTeXParser.UNDERSCORE) + self.state = 517 + self.match(LaTeXParser.L_BRACE) + self.state = 518 + self.equality() + self.state = 519 + self.match(LaTeXParser.R_BRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + + def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): + if self._predicates == None: + self._predicates = dict() + self._predicates[1] = self.relation_sempred + self._predicates[4] = self.additive_sempred + self._predicates[5] = self.mp_sempred + self._predicates[6] = self.mp_nofunc_sempred + self._predicates[15] = self.exp_sempred + self._predicates[16] = self.exp_nofunc_sempred + pred = 
self._predicates.get(ruleIndex, None) + if pred is None: + raise Exception("No predicate with index:" + str(ruleIndex)) + else: + return pred(localctx, predIndex) + + def relation_sempred(self, localctx:RelationContext, predIndex:int): + if predIndex == 0: + return self.precpred(self._ctx, 2) + + + def additive_sempred(self, localctx:AdditiveContext, predIndex:int): + if predIndex == 1: + return self.precpred(self._ctx, 2) + + + def mp_sempred(self, localctx:MpContext, predIndex:int): + if predIndex == 2: + return self.precpred(self._ctx, 2) + + + def mp_nofunc_sempred(self, localctx:Mp_nofuncContext, predIndex:int): + if predIndex == 3: + return self.precpred(self._ctx, 2) + + + def exp_sempred(self, localctx:ExpContext, predIndex:int): + if predIndex == 4: + return self.precpred(self._ctx, 2) + + + def exp_nofunc_sempred(self, localctx:Exp_nofuncContext, predIndex:int): + if predIndex == 5: + return self.precpred(self._ctx, 2) + + + + + diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_build_latex_antlr.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_build_latex_antlr.py new file mode 100644 index 0000000000000000000000000000000000000000..a5502e9b0742f27f651a66449de5ce7a6a32a3cf --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_build_latex_antlr.py @@ -0,0 +1,91 @@ +import os +import subprocess +import glob + +from sympy.utilities.misc import debug + +here = os.path.dirname(__file__) +grammar_file = os.path.abspath(os.path.join(here, "LaTeX.g4")) +dir_latex_antlr = os.path.join(here, "_antlr") + +header = '''\ +# *** GENERATED BY `setup.py antlr`, DO NOT EDIT BY HAND *** +# +# Generated from ../LaTeX.g4, derived from latex2sympy +# latex2sympy is licensed under the MIT license +# https://github.com/augustt198/latex2sympy/blob/master/LICENSE.txt +# +# Generated with antlr4 +# antlr4 is licensed under the BSD-3-Clause License +# https://github.com/antlr/antlr4/blob/master/LICENSE.txt +''' + 
+ +def check_antlr_version(): + debug("Checking antlr4 version...") + + try: + debug(subprocess.check_output(["antlr4"]) + .decode('utf-8').split("\n")[0]) + return True + except (subprocess.CalledProcessError, FileNotFoundError): + debug("The 'antlr4' command line tool is not installed, " + "or not on your PATH.\n" + "> Please refer to the README.md file for more information.") + return False + + +def build_parser(output_dir=dir_latex_antlr): + check_antlr_version() + + debug("Updating ANTLR-generated code in {}".format(output_dir)) + + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + with open(os.path.join(output_dir, "__init__.py"), "w+") as fp: + fp.write(header) + + args = [ + "antlr4", + grammar_file, + "-o", output_dir, + # for now, not generating these as latex2sympy did not use them + "-no-visitor", + "-no-listener", + ] + + debug("Running code generation...\n\t$ {}".format(" ".join(args))) + subprocess.check_output(args, cwd=output_dir) + + debug("Applying headers, removing unnecessary files and renaming...") + # Handle case insensitive file systems. If the files are already + # generated, they will be written to latex* but LaTeX*.* won't match them. + for path in (glob.glob(os.path.join(output_dir, "LaTeX*.*")) or + glob.glob(os.path.join(output_dir, "latex*.*"))): + + # Remove files ending in .interp or .tokens as they are not needed. 
+ if not path.endswith(".py"): + os.unlink(path) + continue + + new_path = os.path.join(output_dir, os.path.basename(path).lower()) + with open(path, 'r') as f: + lines = [line.rstrip() + '\n' for line in f.readlines()] + + os.unlink(path) + + with open(new_path, "w") as out_file: + offset = 0 + while lines[offset].startswith('#'): + offset += 1 + out_file.write(header) + out_file.writelines(lines[offset:]) + + debug("\t{}".format(new_path)) + + return True + + +if __name__ == "__main__": + build_parser() diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_parse_latex_antlr.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_parse_latex_antlr.py new file mode 100644 index 0000000000000000000000000000000000000000..26604375b3a9622f8c1dacdb1d678d09c2c3ad41 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/_parse_latex_antlr.py @@ -0,0 +1,607 @@ +# Ported from latex2sympy by @augustt198 +# https://github.com/augustt198/latex2sympy +# See license in LICENSE.txt +from importlib.metadata import version +import sympy +from sympy.external import import_module +from sympy.printing.str import StrPrinter +from sympy.physics.quantum.state import Bra, Ket + +from .errors import LaTeXParsingError + + +LaTeXParser = LaTeXLexer = MathErrorListener = None + +try: + LaTeXParser = import_module('sympy.parsing.latex._antlr.latexparser', + import_kwargs={'fromlist': ['LaTeXParser']}).LaTeXParser + LaTeXLexer = import_module('sympy.parsing.latex._antlr.latexlexer', + import_kwargs={'fromlist': ['LaTeXLexer']}).LaTeXLexer +except Exception: + pass + +ErrorListener = import_module('antlr4.error.ErrorListener', + warn_not_installed=True, + import_kwargs={'fromlist': ['ErrorListener']} + ) + + + +if ErrorListener: + class MathErrorListener(ErrorListener.ErrorListener): # type:ignore # noqa:F811 + def __init__(self, src): + super(ErrorListener.ErrorListener, self).__init__() + self.src = src + + def syntaxError(self, recog, 
symbol, line, col, msg, e): + fmt = "%s\n%s\n%s" + marker = "~" * col + "^" + + if msg.startswith("missing"): + err = fmt % (msg, self.src, marker) + elif msg.startswith("no viable"): + err = fmt % ("I expected something else here", self.src, marker) + elif msg.startswith("mismatched"): + names = LaTeXParser.literalNames + expected = [ + names[i] for i in e.getExpectedTokens() if i < len(names) + ] + if len(expected) < 10: + expected = " ".join(expected) + err = (fmt % ("I expected one of these: " + expected, self.src, + marker)) + else: + err = (fmt % ("I expected something else here", self.src, + marker)) + else: + err = fmt % ("I don't understand this", self.src, marker) + raise LaTeXParsingError(err) + + +def parse_latex(sympy, strict=False): + antlr4 = import_module('antlr4') + + if None in [antlr4, MathErrorListener] or \ + not version('antlr4-python3-runtime').startswith('4.11'): + raise ImportError("LaTeX parsing requires the antlr4 Python package," + " provided by pip (antlr4-python3-runtime) or" + " conda (antlr-python-runtime), version 4.11") + + sympy = sympy.strip() + matherror = MathErrorListener(sympy) + + stream = antlr4.InputStream(sympy) + lex = LaTeXLexer(stream) + lex.removeErrorListeners() + lex.addErrorListener(matherror) + + tokens = antlr4.CommonTokenStream(lex) + parser = LaTeXParser(tokens) + + # remove default console error listener + parser.removeErrorListeners() + parser.addErrorListener(matherror) + + relation = parser.math().relation() + if strict and (relation.start.start != 0 or relation.stop.stop != len(sympy) - 1): + raise LaTeXParsingError("Invalid LaTeX") + expr = convert_relation(relation) + + return expr + + +def convert_relation(rel): + if rel.expr(): + return convert_expr(rel.expr()) + + lh = convert_relation(rel.relation(0)) + rh = convert_relation(rel.relation(1)) + if rel.LT(): + return sympy.StrictLessThan(lh, rh) + elif rel.LTE(): + return sympy.LessThan(lh, rh) + elif rel.GT(): + return sympy.StrictGreaterThan(lh, rh) 
+ elif rel.GTE(): + return sympy.GreaterThan(lh, rh) + elif rel.EQUAL(): + return sympy.Eq(lh, rh) + elif rel.NEQ(): + return sympy.Ne(lh, rh) + + +def convert_expr(expr): + return convert_add(expr.additive()) + + +def convert_add(add): + if add.ADD(): + lh = convert_add(add.additive(0)) + rh = convert_add(add.additive(1)) + return sympy.Add(lh, rh, evaluate=False) + elif add.SUB(): + lh = convert_add(add.additive(0)) + rh = convert_add(add.additive(1)) + if hasattr(rh, "is_Atom") and rh.is_Atom: + return sympy.Add(lh, -1 * rh, evaluate=False) + return sympy.Add(lh, sympy.Mul(-1, rh, evaluate=False), evaluate=False) + else: + return convert_mp(add.mp()) + + +def convert_mp(mp): + if hasattr(mp, 'mp'): + mp_left = mp.mp(0) + mp_right = mp.mp(1) + else: + mp_left = mp.mp_nofunc(0) + mp_right = mp.mp_nofunc(1) + + if mp.MUL() or mp.CMD_TIMES() or mp.CMD_CDOT(): + lh = convert_mp(mp_left) + rh = convert_mp(mp_right) + return sympy.Mul(lh, rh, evaluate=False) + elif mp.DIV() or mp.CMD_DIV() or mp.COLON(): + lh = convert_mp(mp_left) + rh = convert_mp(mp_right) + return sympy.Mul(lh, sympy.Pow(rh, -1, evaluate=False), evaluate=False) + else: + if hasattr(mp, 'unary'): + return convert_unary(mp.unary()) + else: + return convert_unary(mp.unary_nofunc()) + + +def convert_unary(unary): + if hasattr(unary, 'unary'): + nested_unary = unary.unary() + else: + nested_unary = unary.unary_nofunc() + if hasattr(unary, 'postfix_nofunc'): + first = unary.postfix() + tail = unary.postfix_nofunc() + postfix = [first] + tail + else: + postfix = unary.postfix() + + if unary.ADD(): + return convert_unary(nested_unary) + elif unary.SUB(): + numabs = convert_unary(nested_unary) + # Use Integer(-n) instead of Mul(-1, n) + return -numabs + elif postfix: + return convert_postfix_list(postfix) + + +def convert_postfix_list(arr, i=0): + if i >= len(arr): + raise LaTeXParsingError("Index out of bounds") + + res = convert_postfix(arr[i]) + if isinstance(res, sympy.Expr): + if i == len(arr) - 1: + 
return res # nothing to multiply by + else: + if i > 0: + left = convert_postfix(arr[i - 1]) + right = convert_postfix(arr[i + 1]) + if isinstance(left, sympy.Expr) and isinstance( + right, sympy.Expr): + left_syms = convert_postfix(arr[i - 1]).atoms(sympy.Symbol) + right_syms = convert_postfix(arr[i + 1]).atoms( + sympy.Symbol) + # if the left and right sides contain no variables and the + # symbol in between is 'x', treat as multiplication. + if not (left_syms or right_syms) and str(res) == 'x': + return convert_postfix_list(arr, i + 1) + # multiply by next + return sympy.Mul( + res, convert_postfix_list(arr, i + 1), evaluate=False) + else: # must be derivative + wrt = res[0] + if i == len(arr) - 1: + raise LaTeXParsingError("Expected expression for derivative") + else: + expr = convert_postfix_list(arr, i + 1) + return sympy.Derivative(expr, wrt) + + +def do_subs(expr, at): + if at.expr(): + at_expr = convert_expr(at.expr()) + syms = at_expr.atoms(sympy.Symbol) + if len(syms) == 0: + return expr + elif len(syms) > 0: + sym = next(iter(syms)) + return expr.subs(sym, at_expr) + elif at.equality(): + lh = convert_expr(at.equality().expr(0)) + rh = convert_expr(at.equality().expr(1)) + return expr.subs(lh, rh) + + +def convert_postfix(postfix): + if hasattr(postfix, 'exp'): + exp_nested = postfix.exp() + else: + exp_nested = postfix.exp_nofunc() + + exp = convert_exp(exp_nested) + for op in postfix.postfix_op(): + if op.BANG(): + if isinstance(exp, list): + raise LaTeXParsingError("Cannot apply postfix to derivative") + exp = sympy.factorial(exp, evaluate=False) + elif op.eval_at(): + ev = op.eval_at() + at_b = None + at_a = None + if ev.eval_at_sup(): + at_b = do_subs(exp, ev.eval_at_sup()) + if ev.eval_at_sub(): + at_a = do_subs(exp, ev.eval_at_sub()) + if at_b is not None and at_a is not None: + exp = sympy.Add(at_b, -1 * at_a, evaluate=False) + elif at_b is not None: + exp = at_b + elif at_a is not None: + exp = at_a + + return exp + + +def convert_exp(exp): + 
if hasattr(exp, 'exp'): + exp_nested = exp.exp() + else: + exp_nested = exp.exp_nofunc() + + if exp_nested: + base = convert_exp(exp_nested) + if isinstance(base, list): + raise LaTeXParsingError("Cannot raise derivative to power") + if exp.atom(): + exponent = convert_atom(exp.atom()) + elif exp.expr(): + exponent = convert_expr(exp.expr()) + return sympy.Pow(base, exponent, evaluate=False) + else: + if hasattr(exp, 'comp'): + return convert_comp(exp.comp()) + else: + return convert_comp(exp.comp_nofunc()) + + +def convert_comp(comp): + if comp.group(): + return convert_expr(comp.group().expr()) + elif comp.abs_group(): + return sympy.Abs(convert_expr(comp.abs_group().expr()), evaluate=False) + elif comp.atom(): + return convert_atom(comp.atom()) + elif comp.floor(): + return convert_floor(comp.floor()) + elif comp.ceil(): + return convert_ceil(comp.ceil()) + elif comp.func(): + return convert_func(comp.func()) + + +def convert_atom(atom): + if atom.LETTER(): + sname = atom.LETTER().getText() + if atom.subexpr(): + if atom.subexpr().expr(): # subscript is expr + subscript = convert_expr(atom.subexpr().expr()) + else: # subscript is atom + subscript = convert_atom(atom.subexpr().atom()) + sname += '_{' + StrPrinter().doprint(subscript) + '}' + if atom.SINGLE_QUOTES(): + sname += atom.SINGLE_QUOTES().getText() # put after subscript for easy identify + return sympy.Symbol(sname) + elif atom.SYMBOL(): + s = atom.SYMBOL().getText()[1:] + if s == "infty": + return sympy.oo + else: + if atom.subexpr(): + subscript = None + if atom.subexpr().expr(): # subscript is expr + subscript = convert_expr(atom.subexpr().expr()) + else: # subscript is atom + subscript = convert_atom(atom.subexpr().atom()) + subscriptName = StrPrinter().doprint(subscript) + s += '_{' + subscriptName + '}' + return sympy.Symbol(s) + elif atom.number(): + s = atom.number().getText().replace(",", "") + return sympy.Number(s) + elif atom.DIFFERENTIAL(): + var = get_differential_var(atom.DIFFERENTIAL()) + 
return sympy.Symbol('d' + var.name) + elif atom.mathit(): + text = rule2text(atom.mathit().mathit_text()) + return sympy.Symbol(text) + elif atom.frac(): + return convert_frac(atom.frac()) + elif atom.binom(): + return convert_binom(atom.binom()) + elif atom.bra(): + val = convert_expr(atom.bra().expr()) + return Bra(val) + elif atom.ket(): + val = convert_expr(atom.ket().expr()) + return Ket(val) + + +def rule2text(ctx): + stream = ctx.start.getInputStream() + # starting index of starting token + startIdx = ctx.start.start + # stopping index of stopping token + stopIdx = ctx.stop.stop + + return stream.getText(startIdx, stopIdx) + + +def convert_frac(frac): + diff_op = False + partial_op = False + if frac.lower and frac.upper: + lower_itv = frac.lower.getSourceInterval() + lower_itv_len = lower_itv[1] - lower_itv[0] + 1 + if (frac.lower.start == frac.lower.stop + and frac.lower.start.type == LaTeXLexer.DIFFERENTIAL): + wrt = get_differential_var_str(frac.lower.start.text) + diff_op = True + elif (lower_itv_len == 2 and frac.lower.start.type == LaTeXLexer.SYMBOL + and frac.lower.start.text == '\\partial' + and (frac.lower.stop.type == LaTeXLexer.LETTER + or frac.lower.stop.type == LaTeXLexer.SYMBOL)): + partial_op = True + wrt = frac.lower.stop.text + if frac.lower.stop.type == LaTeXLexer.SYMBOL: + wrt = wrt[1:] + + if diff_op or partial_op: + wrt = sympy.Symbol(wrt) + if (diff_op and frac.upper.start == frac.upper.stop + and frac.upper.start.type == LaTeXLexer.LETTER + and frac.upper.start.text == 'd'): + return [wrt] + elif (partial_op and frac.upper.start == frac.upper.stop + and frac.upper.start.type == LaTeXLexer.SYMBOL + and frac.upper.start.text == '\\partial'): + return [wrt] + upper_text = rule2text(frac.upper) + + expr_top = None + if diff_op and upper_text.startswith('d'): + expr_top = parse_latex(upper_text[1:]) + elif partial_op and frac.upper.start.text == '\\partial': + expr_top = parse_latex(upper_text[len('\\partial'):]) + if expr_top: + return 
sympy.Derivative(expr_top, wrt) + if frac.upper: + expr_top = convert_expr(frac.upper) + else: + expr_top = sympy.Number(frac.upperd.text) + if frac.lower: + expr_bot = convert_expr(frac.lower) + else: + expr_bot = sympy.Number(frac.lowerd.text) + inverse_denom = sympy.Pow(expr_bot, -1, evaluate=False) + if expr_top == 1: + return inverse_denom + else: + return sympy.Mul(expr_top, inverse_denom, evaluate=False) + +def convert_binom(binom): + expr_n = convert_expr(binom.n) + expr_k = convert_expr(binom.k) + return sympy.binomial(expr_n, expr_k, evaluate=False) + +def convert_floor(floor): + val = convert_expr(floor.val) + return sympy.floor(val, evaluate=False) + +def convert_ceil(ceil): + val = convert_expr(ceil.val) + return sympy.ceiling(val, evaluate=False) + +def convert_func(func): + if func.func_normal(): + if func.L_PAREN(): # function called with parenthesis + arg = convert_func_arg(func.func_arg()) + else: + arg = convert_func_arg(func.func_arg_noparens()) + + name = func.func_normal().start.text[1:] + + # change arc -> a + if name in [ + "arcsin", "arccos", "arctan", "arccsc", "arcsec", "arccot" + ]: + name = "a" + name[3:] + expr = getattr(sympy.functions, name)(arg, evaluate=False) + if name in ["arsinh", "arcosh", "artanh"]: + name = "a" + name[2:] + expr = getattr(sympy.functions, name)(arg, evaluate=False) + + if name == "exp": + expr = sympy.exp(arg, evaluate=False) + + if name in ("log", "lg", "ln"): + if func.subexpr(): + if func.subexpr().expr(): + base = convert_expr(func.subexpr().expr()) + else: + base = convert_atom(func.subexpr().atom()) + elif name == "lg": # ISO 80000-2:2019 + base = 10 + elif name in ("ln", "log"): # SymPy's latex printer prints ln as log by default + base = sympy.E + expr = sympy.log(arg, base, evaluate=False) + + func_pow = None + should_pow = True + if func.supexpr(): + if func.supexpr().expr(): + func_pow = convert_expr(func.supexpr().expr()) + else: + func_pow = convert_atom(func.supexpr().atom()) + + if name in [ + 
"sin", "cos", "tan", "csc", "sec", "cot", "sinh", "cosh", + "tanh" + ]: + if func_pow == -1: + name = "a" + name + should_pow = False + expr = getattr(sympy.functions, name)(arg, evaluate=False) + + if func_pow and should_pow: + expr = sympy.Pow(expr, func_pow, evaluate=False) + + return expr + elif func.LETTER() or func.SYMBOL(): + if func.LETTER(): + fname = func.LETTER().getText() + elif func.SYMBOL(): + fname = func.SYMBOL().getText()[1:] + fname = str(fname) # can't be unicode + if func.subexpr(): + if func.subexpr().expr(): # subscript is expr + subscript = convert_expr(func.subexpr().expr()) + else: # subscript is atom + subscript = convert_atom(func.subexpr().atom()) + subscriptName = StrPrinter().doprint(subscript) + fname += '_{' + subscriptName + '}' + if func.SINGLE_QUOTES(): + fname += func.SINGLE_QUOTES().getText() + input_args = func.args() + output_args = [] + while input_args.args(): # handle multiple arguments to function + output_args.append(convert_expr(input_args.expr())) + input_args = input_args.args() + output_args.append(convert_expr(input_args.expr())) + return sympy.Function(fname)(*output_args) + elif func.FUNC_INT(): + return handle_integral(func) + elif func.FUNC_SQRT(): + expr = convert_expr(func.base) + if func.root: + r = convert_expr(func.root) + return sympy.root(expr, r, evaluate=False) + else: + return sympy.sqrt(expr, evaluate=False) + elif func.FUNC_OVERLINE(): + expr = convert_expr(func.base) + return sympy.conjugate(expr, evaluate=False) + elif func.FUNC_SUM(): + return handle_sum_or_prod(func, "summation") + elif func.FUNC_PROD(): + return handle_sum_or_prod(func, "product") + elif func.FUNC_LIM(): + return handle_limit(func) + + +def convert_func_arg(arg): + if hasattr(arg, 'expr'): + return convert_expr(arg.expr()) + else: + return convert_mp(arg.mp_nofunc()) + + +def handle_integral(func): + if func.additive(): + integrand = convert_add(func.additive()) + elif func.frac(): + integrand = convert_frac(func.frac()) + else: 
+ integrand = 1 + + int_var = None + if func.DIFFERENTIAL(): + int_var = get_differential_var(func.DIFFERENTIAL()) + else: + for sym in integrand.atoms(sympy.Symbol): + s = str(sym) + if len(s) > 1 and s[0] == 'd': + if s[1] == '\\': + int_var = sympy.Symbol(s[2:]) + else: + int_var = sympy.Symbol(s[1:]) + int_sym = sym + if int_var: + integrand = integrand.subs(int_sym, 1) + else: + # Assume dx by default + int_var = sympy.Symbol('x') + + if func.subexpr(): + if func.subexpr().atom(): + lower = convert_atom(func.subexpr().atom()) + else: + lower = convert_expr(func.subexpr().expr()) + if func.supexpr().atom(): + upper = convert_atom(func.supexpr().atom()) + else: + upper = convert_expr(func.supexpr().expr()) + return sympy.Integral(integrand, (int_var, lower, upper)) + else: + return sympy.Integral(integrand, int_var) + + +def handle_sum_or_prod(func, name): + val = convert_mp(func.mp()) + iter_var = convert_expr(func.subeq().equality().expr(0)) + start = convert_expr(func.subeq().equality().expr(1)) + if func.supexpr().expr(): # ^{expr} + end = convert_expr(func.supexpr().expr()) + else: # ^atom + end = convert_atom(func.supexpr().atom()) + + if name == "summation": + return sympy.Sum(val, (iter_var, start, end)) + elif name == "product": + return sympy.Product(val, (iter_var, start, end)) + + +def handle_limit(func): + sub = func.limit_sub() + if sub.LETTER(): + var = sympy.Symbol(sub.LETTER().getText()) + elif sub.SYMBOL(): + var = sympy.Symbol(sub.SYMBOL().getText()[1:]) + else: + var = sympy.Symbol('x') + if sub.SUB(): + direction = "-" + elif sub.ADD(): + direction = "+" + else: + direction = "+-" + approaching = convert_expr(sub.expr()) + content = convert_mp(func.mp()) + + return sympy.Limit(content, var, approaching, direction) + + +def get_differential_var(d): + text = get_differential_var_str(d.getText()) + return sympy.Symbol(text) + + +def get_differential_var_str(text): + for i in range(1, len(text)): + c = text[i] + if not (c == " " or c == "\r" or 
c == "\n" or c == "\t"): + idx = i + break + text = text[idx:] + if text[0] == "\\": + text = text[1:] + return text diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/errors.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..d8c3ef9f06279df42d4b2054acc4cfe39b6682a5 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/errors.py @@ -0,0 +1,2 @@ +class LaTeXParsingError(Exception): + pass diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__init__.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..92e58d3172e100cc376d0b416b3835d164bd5647 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__init__.py @@ -0,0 +1,2 @@ +from .latex_parser import parse_latex_lark, LarkLaTeXParser # noqa +from .transformer import TransformToSymPyExpr # noqa diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__pycache__/__init__.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..02457b72834a2e5ea60f7ddb79284cad76c07516 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__pycache__/__init__.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__pycache__/latex_parser.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__pycache__/latex_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6f10092d58f282fca6c95afc696f51b7e05b3330 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__pycache__/latex_parser.cpython-310.pyc 
differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__pycache__/transformer.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__pycache__/transformer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aac787f4cf3f26edf05bc2ed623febfbcc7fe084 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/__pycache__/transformer.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/grammar/greek_symbols.lark b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/grammar/greek_symbols.lark new file mode 100644 index 0000000000000000000000000000000000000000..7439fab9dcac284dc3c9b5fbfa4fc6db8b29dfd2 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/grammar/greek_symbols.lark @@ -0,0 +1,28 @@ +// Greek symbols +// TODO: Shouold we include the uppercase variants for the symbols where the uppercase variant doesn't have a separate meaning? +ALPHA: "\\alpha" +BETA: "\\beta" +GAMMA: "\\gamma" +DELTA: "\\delta" // TODO: Should this be included? Delta usually denotes other things. +EPSILON: "\\epsilon" | "\\varepsilon" +ZETA: "\\zeta" +ETA: "\\eta" +THETA: "\\theta" | "\\vartheta" +// TODO: Should I add iota to the list? +KAPPA: "\\kappa" +LAMBDA: "\\lambda" // TODO: What about the uppercase variant? +MU: "\\mu" +NU: "\\nu" +XI: "\\xi" +// TODO: Should there be a separate note for transforming \pi into sympy.pi? +RHO: "\\rho" | "\\varrho" +// TODO: What should we do about sigma? 
+TAU: "\\tau" +UPSILON: "\\upsilon" +PHI: "\\phi" | "\\varphi" +CHI: "\\chi" +PSI: "\\psi" +OMEGA: "\\omega" + +GREEK_SYMBOL: ALPHA | BETA | GAMMA | DELTA | EPSILON | ZETA | ETA | THETA | KAPPA + | LAMBDA | MU | NU | XI | RHO | TAU | UPSILON | PHI | CHI | PSI | OMEGA diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/grammar/latex.lark b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/grammar/latex.lark new file mode 100644 index 0000000000000000000000000000000000000000..51f998ef9576b9df93c56a6c937b10d7c03e4aee --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/grammar/latex.lark @@ -0,0 +1,327 @@ +%ignore /[ \t\n\r]+/ + +%ignore "\\," | "\\thinspace" | "\\:" | "\\medspace" | "\\;" | "\\thickspace" +%ignore "\\quad" | "\\qquad" +%ignore "\\!" | "\\negthinspace" | "\\negmedspace" | "\\negthickspace" +%ignore "\\vrule" | "\\vcenter" | "\\vbox" | "\\vskip" | "\\vspace" | "\\hfill" +%ignore "\\*" | "\\-" | "\\." 
| "\\/" | "\\\\" | "\\(" | "\\=" + +%ignore "\\left" | "\\right" +%ignore "\\limits" | "\\nolimits" +%ignore "\\displaystyle" + +///////////////////// tokens /////////////////////// + +// basic binary operators +ADD: "+" +SUB: "-" +MUL: "*" +DIV: "/" + +// tokens with distinct left and right symbols +L_BRACE: "{" +R_BRACE: "}" +L_BRACE_LITERAL: "\\{" +R_BRACE_LITERAL: "\\}" +L_BRACKET: "[" +R_BRACKET: "]" +L_CEIL: "\\lceil" +R_CEIL: "\\rceil" +L_FLOOR: "\\lfloor" +R_FLOOR: "\\rfloor" +L_PAREN: "(" +R_PAREN: ")" + +// limit, integral, sum, and product symbols +FUNC_LIM: "\\lim" +LIM_APPROACH_SYM: "\\to" | "\\rightarrow" | "\\Rightarrow" | "\\longrightarrow" | "\\Longrightarrow" +FUNC_INT: "\\int" | "\\intop" +FUNC_SUM: "\\sum" +FUNC_PROD: "\\prod" + +// common functions +FUNC_EXP: "\\exp" +FUNC_LOG: "\\log" +FUNC_LN: "\\ln" +FUNC_LG: "\\lg" +FUNC_MIN: "\\min" +FUNC_MAX: "\\max" + +// trigonometric functions +FUNC_SIN: "\\sin" +FUNC_COS: "\\cos" +FUNC_TAN: "\\tan" +FUNC_CSC: "\\csc" +FUNC_SEC: "\\sec" +FUNC_COT: "\\cot" + +// inverse trigonometric functions +FUNC_ARCSIN: "\\arcsin" +FUNC_ARCCOS: "\\arccos" +FUNC_ARCTAN: "\\arctan" +FUNC_ARCCSC: "\\arccsc" +FUNC_ARCSEC: "\\arcsec" +FUNC_ARCCOT: "\\arccot" + +// hyperbolic trigonometric functions +FUNC_SINH: "\\sinh" +FUNC_COSH: "\\cosh" +FUNC_TANH: "\\tanh" +FUNC_ARSINH: "\\arsinh" +FUNC_ARCOSH: "\\arcosh" +FUNC_ARTANH: "\\artanh" + +FUNC_SQRT: "\\sqrt" + +// miscellaneous symbols +CMD_TIMES: "\\times" +CMD_CDOT: "\\cdot" +CMD_DIV: "\\div" +CMD_FRAC: "\\frac" | "\\dfrac" | "\\tfrac" | "\\nicefrac" +CMD_BINOM: "\\binom" | "\\dbinom" | "\\tbinom" +CMD_OVERLINE: "\\overline" +CMD_LANGLE: "\\langle" +CMD_RANGLE: "\\rangle" + +CMD_MATHIT: "\\mathit" + +CMD_INFTY: "\\infty" + +BANG: "!" 
+BAR: "|" +CARET: "^" +COLON: ":" +UNDERSCORE: "_" + +// relational symbols +EQUAL: "=" +NOT_EQUAL: "\\neq" | "\\ne" +LT: "<" +LTE: "\\leq" | "\\le" | "\\leqslant" +GT: ">" +GTE: "\\geq" | "\\ge" | "\\geqslant" + +DIV_SYMBOL: CMD_DIV | DIV +MUL_SYMBOL: MUL | CMD_TIMES | CMD_CDOT + +%import .greek_symbols.GREEK_SYMBOL + +UPRIGHT_DIFFERENTIAL_SYMBOL: "\\text{d}" | "\\mathrm{d}" +DIFFERENTIAL_SYMBOL: "d" | UPRIGHT_DIFFERENTIAL_SYMBOL + +// disallow "d" as a variable name because we want to parse "d" as a differential symbol. +SYMBOL: /[a-zA-Z]/ +BASIC_SUBSCRIPTED_SYMBOL: /([a-zA-Z])_(([A-Za-z0-9]|[a-zA-Z]+)|\{([A-Za-z0-9]|[a-zA-Z]+)\})/ +SYMBOL_WITH_GREEK_SUBSCRIPT: /([a-zA-Z])_/ GREEK_SYMBOL | /([a-zA-Z])_/ L_BRACE GREEK_SYMBOL R_BRACE +// best to define the variant with braces like that instead of shoving it all into one case like in +// /([a-zA-Z])_/ L_BRACE? GREEK_SYMBOL R_BRACE? because then we can easily error out on input like +// r"h_{\theta" +GREEK_SUBSCRIPTED_SYMBOL: GREEK_SYMBOL /_(([A-Za-z0-9]|[a-zA-Z]+)|\{([A-Za-z0-9]|[a-zA-Z]+)\})/ + +%import common.DIGIT -> DIGIT + +//////////////////// grammar ////////////////////// + +latex_string: _relation | _expression + +_one_letter_symbol: SYMBOL + | BASIC_SUBSCRIPTED_SYMBOL + | SYMBOL_WITH_GREEK_SUBSCRIPT + | GREEK_SUBSCRIPTED_SYMBOL + | GREEK_SYMBOL +multi_letter_symbol: CMD_MATHIT L_BRACE /[a-zA-Z]+(\s+[a-zA-Z]+)*/ R_BRACE +number: /\d+(\.\d*)?/ + +_atomic_expr: _one_letter_symbol + | multi_letter_symbol + | number + | CMD_INFTY + +group_round_parentheses: L_PAREN _expression R_PAREN +group_square_brackets: L_BRACKET _expression R_BRACKET +group_curly_parentheses: L_BRACE _expression R_BRACE + +_relation: eq | ne | lt | lte | gt | gte + +eq: _expression EQUAL _expression +ne: _expression NOT_EQUAL _expression +lt: _expression LT _expression +lte: _expression LTE _expression +gt: _expression GT _expression +gte: _expression GTE _expression + +_expression_core: _atomic_expr | group_curly_parentheses + +add: 
_expression ADD _expression_mul +sub: _expression SUB _expression_mul + | SUB _expression_mul +mul: _expression_mul MUL_SYMBOL _expression_power +div: _expression_mul DIV_SYMBOL _expression_power + +adjacent_expressions: (_one_letter_symbol | number) _expression_mul + | group_round_parentheses (group_round_parentheses | _one_letter_symbol) + | _function _function + | fraction _expression + +_expression_func: _expression_core + | group_round_parentheses + | fraction + | binomial + | _function + +_expression_power: _expression_func | superscript + +_expression_mul: _expression_power + | mul | div | adjacent_expressions + | _integral// | derivative + | summation | product + | limit + +_expression: _expression_mul | add | sub + +_limit_dir: "+" | "-" | L_BRACE ("+" | "-") R_BRACE + +limit_dir_expr: _expression CARET _limit_dir + +group_curly_parentheses_lim: L_BRACE _expression LIM_APPROACH_SYM (limit_dir_expr | _expression) R_BRACE + +limit: FUNC_LIM UNDERSCORE group_curly_parentheses_lim _expression + +differential: DIFFERENTIAL_SYMBOL _one_letter_symbol + +//_derivative_operator: CMD_FRAC L_BRACE DIFFERENTIAL_SYMBOL R_BRACE L_BRACE differential R_BRACE + +//derivative: _derivative_operator _expression + +_integral: normal_integral | integral_with_special_fraction + +normal_integral: FUNC_INT _expression DIFFERENTIAL_SYMBOL _one_letter_symbol + | FUNC_INT (CARET _expression_core UNDERSCORE _expression_core)? _expression? DIFFERENTIAL_SYMBOL _one_letter_symbol + | FUNC_INT (UNDERSCORE _expression_core CARET _expression_core)? _expression? DIFFERENTIAL_SYMBOL _one_letter_symbol + +group_curly_parentheses_int: L_BRACE _expression? differential R_BRACE + +special_fraction: CMD_FRAC group_curly_parentheses_int group_curly_parentheses + +integral_with_special_fraction: FUNC_INT special_fraction + | FUNC_INT (CARET _expression_core UNDERSCORE _expression_core)? special_fraction + | FUNC_INT (UNDERSCORE _expression_core CARET _expression_core)? 
special_fraction + +group_curly_parentheses_special: UNDERSCORE L_BRACE _atomic_expr EQUAL _atomic_expr R_BRACE CARET _expression_core + | CARET _expression_core UNDERSCORE L_BRACE _atomic_expr EQUAL _atomic_expr R_BRACE + +summation: FUNC_SUM group_curly_parentheses_special _expression + | FUNC_SUM group_curly_parentheses_special _expression + +product: FUNC_PROD group_curly_parentheses_special _expression + | FUNC_PROD group_curly_parentheses_special _expression + +superscript: _expression_func CARET _expression_power + +fraction: _basic_fraction + | _simple_fraction + | _general_fraction + +_basic_fraction: CMD_FRAC DIGIT (DIGIT | SYMBOL | GREEK_SYMBOL) + +_simple_fraction: CMD_FRAC DIGIT group_curly_parentheses + | CMD_FRAC group_curly_parentheses (DIGIT | SYMBOL | GREEK_SYMBOL) + +_general_fraction: CMD_FRAC group_curly_parentheses group_curly_parentheses + +binomial: _basic_binomial + | _simple_binomial + | _general_binomial + +_basic_binomial: CMD_BINOM DIGIT (DIGIT | SYMBOL | GREEK_SYMBOL) + +_simple_binomial: CMD_BINOM DIGIT group_curly_parentheses + | CMD_BINOM group_curly_parentheses (DIGIT | SYMBOL | GREEK_SYMBOL) + +_general_binomial: CMD_BINOM group_curly_parentheses group_curly_parentheses + +list_of_expressions: _expression ("," _expression)* + +function_applied: _one_letter_symbol L_PAREN list_of_expressions R_PAREN + +min: FUNC_MIN L_PAREN list_of_expressions R_PAREN + +max: FUNC_MAX L_PAREN list_of_expressions R_PAREN + +bra: CMD_LANGLE _expression BAR + +ket: BAR _expression CMD_RANGLE + +inner_product: CMD_LANGLE _expression BAR _expression CMD_RANGLE + +_function: function_applied + | abs | floor | ceil + | _trigonometric_function | _inverse_trigonometric_function + | _trigonometric_function_power + | _hyperbolic_trigonometric_function | _inverse_hyperbolic_trigonometric_function + | exponential + | log + | square_root + | factorial + | conjugate + | max | min + | bra | ket | inner_product + +exponential: FUNC_EXP _expression + +log: FUNC_LOG 
_expression + | FUNC_LN _expression + | FUNC_LG _expression + | FUNC_LOG UNDERSCORE (DIGIT | _one_letter_symbol) _expression + | FUNC_LOG UNDERSCORE group_curly_parentheses _expression + +square_root: FUNC_SQRT group_curly_parentheses + | FUNC_SQRT group_square_brackets group_curly_parentheses + +factorial: _expression BANG + +conjugate: CMD_OVERLINE group_curly_parentheses + | CMD_OVERLINE DIGIT + +_trigonometric_function: sin | cos | tan | csc | sec | cot + +sin: FUNC_SIN _expression +cos: FUNC_COS _expression +tan: FUNC_TAN _expression +csc: FUNC_CSC _expression +sec: FUNC_SEC _expression +cot: FUNC_COT _expression + +_trigonometric_function_power: sin_power | cos_power | tan_power | csc_power | sec_power | cot_power + +sin_power: FUNC_SIN CARET _expression_core _expression +cos_power: FUNC_COS CARET _expression_core _expression +tan_power: FUNC_TAN CARET _expression_core _expression +csc_power: FUNC_CSC CARET _expression_core _expression +sec_power: FUNC_SEC CARET _expression_core _expression +cot_power: FUNC_COT CARET _expression_core _expression + +_hyperbolic_trigonometric_function: sinh | cosh | tanh + +sinh: FUNC_SINH _expression +cosh: FUNC_COSH _expression +tanh: FUNC_TANH _expression + +_inverse_trigonometric_function: arcsin | arccos | arctan | arccsc | arcsec | arccot + +arcsin: FUNC_ARCSIN _expression +arccos: FUNC_ARCCOS _expression +arctan: FUNC_ARCTAN _expression +arccsc: FUNC_ARCCSC _expression +arcsec: FUNC_ARCSEC _expression +arccot: FUNC_ARCCOT _expression + +_inverse_hyperbolic_trigonometric_function: asinh | acosh | atanh + +asinh: FUNC_ARSINH _expression +acosh: FUNC_ARCOSH _expression +atanh: FUNC_ARTANH _expression + +abs: BAR _expression BAR +floor: L_FLOOR _expression R_FLOOR +ceil: L_CEIL _expression R_CEIL diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/latex_parser.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/latex_parser.py new file mode 100644 index 
0000000000000000000000000000000000000000..bd14f7e162df3cbba39a5d665e841e16a66a14ef --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/latex_parser.py @@ -0,0 +1,146 @@ +import os +import logging +import re + +from sympy.external import import_module +from sympy.parsing.latex.lark.transformer import TransformToSymPyExpr + +_lark = import_module("lark") + + +class LarkLaTeXParser: + r"""Class for converting input `\mathrm{\LaTeX}` strings into SymPy Expressions. + It holds all the necessary internal data for doing so, and exposes hooks for + customizing its behavior. + + Parameters + ========== + + print_debug_output : bool, optional + + If set to ``True``, prints debug output to the logger. Defaults to ``False``. + + transform : bool, optional + + If set to ``True``, the class runs the Transformer class on the parse tree + generated by running ``Lark.parse`` on the input string. Defaults to ``True``. + + Setting it to ``False`` can help with debugging the `\mathrm{\LaTeX}` grammar. + + grammar_file : str, optional + + The path to the grammar file that the parser should use. If set to ``None``, + it uses the default grammar, which is in ``grammar/latex.lark``, relative to + the ``sympy/parsing/latex/lark/`` directory. + + transformer : str, optional + + The name of the Transformer class to use. If set to ``None``, it uses the + default transformer class, which is :py:func:`TransformToSymPyExpr`. 
+ + """ + def __init__(self, print_debug_output=False, transform=True, grammar_file=None, transformer=None): + grammar_dir_path = os.path.join(os.path.dirname(__file__), "grammar/") + + if grammar_file is None: + with open(os.path.join(grammar_dir_path, "latex.lark"), encoding="utf-8") as f: + latex_grammar = f.read() + else: + with open(grammar_file, encoding="utf-8") as f: + latex_grammar = f.read() + + self.parser = _lark.Lark( + latex_grammar, + source_path=grammar_dir_path, + parser="earley", + start="latex_string", + lexer="auto", + ambiguity="explicit", + propagate_positions=False, + maybe_placeholders=False, + keep_all_tokens=True) + + self.print_debug_output = print_debug_output + self.transform_expr = transform + + if transformer is None: + self.transformer = TransformToSymPyExpr() + else: + self.transformer = transformer() + + def doparse(self, s: str): + if self.print_debug_output: + _lark.logger.setLevel(logging.DEBUG) + + parse_tree = self.parser.parse(s) + + if not self.transform_expr: + # exit early and return the parse tree + _lark.logger.debug("expression = %s", s) + _lark.logger.debug(parse_tree) + _lark.logger.debug(parse_tree.pretty()) + return parse_tree + + if self.print_debug_output: + # print this stuff before attempting to run the transformer + _lark.logger.debug("expression = %s", s) + # print the `parse_tree` variable + _lark.logger.debug(parse_tree.pretty()) + + sympy_expression = self.transformer.transform(parse_tree) + + if self.print_debug_output: + _lark.logger.debug("SymPy expression = %s", sympy_expression) + + return sympy_expression + + +if _lark is not None: + _lark_latex_parser = LarkLaTeXParser() + + +def parse_latex_lark(s: str): + """ + Experimental LaTeX parser using Lark. + + This function is still under development and its API may change with the + next releases of SymPy. 
+ """ + if _lark is None: + raise ImportError("Lark is probably not installed") + return _lark_latex_parser.doparse(s) + + +def _pretty_print_lark_trees(tree, indent=0, show_expr=True): + if isinstance(tree, _lark.Token): + return tree.value + + data = str(tree.data) + + is_expr = data.startswith("expression") + + if is_expr: + data = re.sub(r"^expression", "E", data) + + is_ambig = (data == "_ambig") + + if is_ambig: + new_indent = indent + 2 + else: + new_indent = indent + + output = "" + show_node = not is_expr or show_expr + + if show_node: + output += str(data) + "(" + + if is_ambig: + output += "\n" + "\n".join([" " * new_indent + _pretty_print_lark_trees(i, new_indent, show_expr) for i in tree.children]) + else: + output += ",".join([_pretty_print_lark_trees(i, new_indent, show_expr) for i in tree.children]) + + if show_node: + output += ")" + + return output diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/transformer.py b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..af76a9d496ac50b73d23f34024d81fd2a7ecbe65 --- /dev/null +++ b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/lark/transformer.py @@ -0,0 +1,557 @@ +import re + +import sympy +from sympy.external import import_module +from sympy.parsing.latex.errors import LaTeXParsingError + +lark = import_module("lark") + +if lark: + from lark import Transformer, Token # type: ignore +else: + class Transformer: # type: ignore + def transform(self, *args): + pass + + + class Token: # type: ignore + pass + + +# noinspection PyPep8Naming,PyMethodMayBeStatic +class TransformToSymPyExpr(Transformer): + """Returns a SymPy expression that is generated by traversing the ``lark.Tree`` + passed to the ``.transform()`` function. 
+ + Notes + ===== + + **This class is never supposed to be used directly.** + + In order to tweak the behavior of this class, it has to be subclassed and then after + the required modifications are made, the name of the new class should be passed to + the :py:class:`LarkLaTeXParser` class by using the ``transformer`` argument in the + constructor. + + Parameters + ========== + + visit_tokens : bool, optional + For information about what this option does, see `here + `_. + + Note that the option must be set to ``True`` for the default parser to work. + """ + + SYMBOL = sympy.Symbol + DIGIT = sympy.core.numbers.Integer + + def CMD_INFTY(self, tokens): + return sympy.oo + + def GREEK_SYMBOL(self, tokens): + # we omit the first character because it is a backslash. Also, if the variable name has "var" in it, + # like "varphi" or "varepsilon", we remove that too + variable_name = re.sub("var", "", tokens[1:]) + + return sympy.Symbol(variable_name) + + def BASIC_SUBSCRIPTED_SYMBOL(self, tokens): + symbol, sub = tokens.value.split("_") + if sub.startswith("{"): + return sympy.Symbol("%s_{%s}" % (symbol, sub[1:-1])) + else: + return sympy.Symbol("%s_{%s}" % (symbol, sub)) + + def GREEK_SUBSCRIPTED_SYMBOL(self, tokens): + greek_letter, sub = tokens.value.split("_") + greek_letter = re.sub("var", "", greek_letter[1:]) + + if sub.startswith("{"): + return sympy.Symbol("%s_{%s}" % (greek_letter, sub[1:-1])) + else: + return sympy.Symbol("%s_{%s}" % (greek_letter, sub)) + + def SYMBOL_WITH_GREEK_SUBSCRIPT(self, tokens): + symbol, sub = tokens.value.split("_") + if sub.startswith("{"): + greek_letter = sub[2:-1] + greek_letter = re.sub("var", "", greek_letter) + + return sympy.Symbol("%s_{%s}" % (symbol, greek_letter)) + else: + greek_letter = sub[1:] + greek_letter = re.sub("var", "", greek_letter) + + return sympy.Symbol("%s_{%s}" % (symbol, greek_letter)) + + def multi_letter_symbol(self, tokens): + return sympy.Symbol(tokens[2]) + + def number(self, tokens): + if "." 
in tokens[0]: + return sympy.core.numbers.Float(tokens[0]) + else: + return sympy.core.numbers.Integer(tokens[0]) + + def latex_string(self, tokens): + return tokens[0] + + def group_round_parentheses(self, tokens): + return tokens[1] + + def group_square_brackets(self, tokens): + return tokens[1] + + def group_curly_parentheses(self, tokens): + return tokens[1] + + def eq(self, tokens): + return sympy.Eq(tokens[0], tokens[2]) + + def ne(self, tokens): + return sympy.Ne(tokens[0], tokens[2]) + + def lt(self, tokens): + return sympy.Lt(tokens[0], tokens[2]) + + def lte(self, tokens): + return sympy.Le(tokens[0], tokens[2]) + + def gt(self, tokens): + return sympy.Gt(tokens[0], tokens[2]) + + def gte(self, tokens): + return sympy.Ge(tokens[0], tokens[2]) + + def add(self, tokens): + return sympy.Add(tokens[0], tokens[2]) + + def sub(self, tokens): + if len(tokens) == 2: + return -tokens[1] + elif len(tokens) == 3: + return sympy.Add(tokens[0], -tokens[2]) + + def mul(self, tokens): + return sympy.Mul(tokens[0], tokens[2]) + + def div(self, tokens): + return sympy.Mul(tokens[0], sympy.Pow(tokens[2], -1)) + + def adjacent_expressions(self, tokens): + # Most of the time, if two expressions are next to each other, it means implicit multiplication, + # but not always + from sympy.physics.quantum import Bra, Ket + if isinstance(tokens[0], Ket) and isinstance(tokens[1], Bra): + from sympy.physics.quantum import OuterProduct + return OuterProduct(tokens[0], tokens[1]) + elif tokens[0] == sympy.Symbol("d"): + # If the leftmost token is a "d", then it is highly likely that this is a differential + return tokens[0], tokens[1] + elif isinstance(tokens[0], tuple): + # then we have a derivative + return sympy.Derivative(tokens[1], tokens[0][1]) + else: + return sympy.Mul(tokens[0], tokens[1]) + + def superscript(self, tokens): + return sympy.Pow(tokens[0], tokens[2]) + + def fraction(self, tokens): + numerator = tokens[1] + if isinstance(tokens[2], tuple): + # we only need the 
variable w.r.t. which we are differentiating + _, variable = tokens[2] + + # we will pass this information upwards + return "derivative", variable + else: + denominator = tokens[2] + return sympy.Mul(numerator, sympy.Pow(denominator, -1)) + + def binomial(self, tokens): + return sympy.binomial(tokens[1], tokens[2]) + + def normal_integral(self, tokens): + underscore_index = None + caret_index = None + + if "_" in tokens: + # we need to know the index because the next item in the list is the + # arguments for the lower bound of the integral + underscore_index = tokens.index("_") + + if "^" in tokens: + # we need to know the index because the next item in the list is the + # arguments for the upper bound of the integral + caret_index = tokens.index("^") + + lower_bound = tokens[underscore_index + 1] if underscore_index else None + upper_bound = tokens[caret_index + 1] if caret_index else None + + differential_symbol = self._extract_differential_symbol(tokens) + + if differential_symbol is None: + raise LaTeXParsingError("Differential symbol was not found in the expression." + "Valid differential symbols are \"d\", \"\\text{d}, and \"\\mathrm{d}\".") + + # else we can assume that a differential symbol was found + differential_variable_index = tokens.index(differential_symbol) + 1 + differential_variable = tokens[differential_variable_index] + + # we can't simply do something like `if (lower_bound and not upper_bound) ...` because this would + # evaluate to `True` if the `lower_bound` is 0 and upper bound is non-zero + if lower_bound is not None and upper_bound is None: + # then one was given and the other wasn't + raise LaTeXParsingError("Lower bound for the integral was found, but upper bound was not found.") + + if upper_bound is not None and lower_bound is None: + # then one was given and the other wasn't + raise LaTeXParsingError("Upper bound for the integral was found, but lower bound was not found.") + + # check if any expression was given or not. 
If it wasn't, then set the integrand to 1. + if underscore_index is not None and underscore_index == differential_variable_index - 3: + # The Token at differential_variable_index - 2 should be the integrand. However, if going one more step + # backwards after that gives us the underscore, then that means that there _was_ no integrand. + # Example: \int^7_0 dx + integrand = 1 + elif caret_index is not None and caret_index == differential_variable_index - 3: + # The Token at differential_variable_index - 2 should be the integrand. However, if going one more step + # backwards after that gives us the caret, then that means that there _was_ no integrand. + # Example: \int_0^7 dx + integrand = 1 + elif differential_variable_index == 2: + # this means we have something like "\int dx", because the "\int" symbol will always be + # at index 0 in `tokens` + integrand = 1 + else: + # The Token at differential_variable_index - 1 is the differential symbol itself, so we need to go one + # more step before that. 
+ integrand = tokens[differential_variable_index - 2] + + if lower_bound is not None: + # then we have a definite integral + + # we can assume that either both the lower and upper bounds are given, or + # neither of them are + return sympy.Integral(integrand, (differential_variable, lower_bound, upper_bound)) + else: + # we have an indefinite integral + return sympy.Integral(integrand, differential_variable) + + def group_curly_parentheses_int(self, tokens): + # return signature is a tuple consisting of the expression in the numerator, along with the variable of + # integration + if len(tokens) == 3: + return 1, tokens[1] + elif len(tokens) == 4: + return tokens[1], tokens[2] + # there are no other possibilities + + def special_fraction(self, tokens): + numerator, variable = tokens[1] + denominator = tokens[2] + + # We pass the integrand, along with information about the variable of integration, upw + return sympy.Mul(numerator, sympy.Pow(denominator, -1)), variable + + def integral_with_special_fraction(self, tokens): + underscore_index = None + caret_index = None + + if "_" in tokens: + # we need to know the index because the next item in the list is the + # arguments for the lower bound of the integral + underscore_index = tokens.index("_") + + if "^" in tokens: + # we need to know the index because the next item in the list is the + # arguments for the upper bound of the integral + caret_index = tokens.index("^") + + lower_bound = tokens[underscore_index + 1] if underscore_index else None + upper_bound = tokens[caret_index + 1] if caret_index else None + + # we can't simply do something like `if (lower_bound and not upper_bound) ...` because this would + # evaluate to `True` if the `lower_bound` is 0 and upper bound is non-zero + if lower_bound is not None and upper_bound is None: + # then one was given and the other wasn't + raise LaTeXParsingError("Lower bound for the integral was found, but upper bound was not found.") + + if upper_bound is not None and 
lower_bound is None: + # then one was given and the other wasn't + raise LaTeXParsingError("Upper bound for the integral was found, but lower bound was not found.") + + integrand, differential_variable = tokens[-1] + + if lower_bound is not None: + # then we have a definite integral + + # we can assume that either both the lower and upper bounds are given, or + # neither of them are + return sympy.Integral(integrand, (differential_variable, lower_bound, upper_bound)) + else: + # we have an indefinite integral + return sympy.Integral(integrand, differential_variable) + + def group_curly_parentheses_special(self, tokens): + underscore_index = tokens.index("_") + caret_index = tokens.index("^") + + # given the type of expressions we are parsing, we can assume that the lower limit + # will always use braces around its arguments. This is because we don't support + # converting unconstrained sums into SymPy expressions. + + # first we isolate the bottom limit + left_brace_index = tokens.index("{", underscore_index) + right_brace_index = tokens.index("}", underscore_index) + + bottom_limit = tokens[left_brace_index + 1: right_brace_index] + + # next, we isolate the upper limit + top_limit = tokens[caret_index + 1:] + + # the code below will be useful for supporting things like `\sum_{n = 0}^{n = 5} n^2` + # if "{" in top_limit: + # left_brace_index = tokens.index("{", caret_index) + # if left_brace_index != -1: + # # then there's a left brace in the string, and we need to find the closing right brace + # right_brace_index = tokens.index("}", caret_index) + # top_limit = tokens[left_brace_index + 1: right_brace_index] + + # print(f"top limit = {top_limit}") + + index_variable = bottom_limit[0] + lower_limit = bottom_limit[-1] + upper_limit = top_limit[0] # for now, the index will always be 0 + + # print(f"return value = ({index_variable}, {lower_limit}, {upper_limit})") + + return index_variable, lower_limit, upper_limit + + def summation(self, tokens): + return 
sympy.Sum(tokens[2], tokens[1]) + + def product(self, tokens): + return sympy.Product(tokens[2], tokens[1]) + + def limit_dir_expr(self, tokens): + caret_index = tokens.index("^") + + if "{" in tokens: + left_curly_brace_index = tokens.index("{", caret_index) + direction = tokens[left_curly_brace_index + 1] + else: + direction = tokens[caret_index + 1] + + if direction == "+": + return tokens[0], "+" + elif direction == "-": + return tokens[0], "-" + else: + return tokens[0], "+-" + + def group_curly_parentheses_lim(self, tokens): + limit_variable = tokens[1] + if isinstance(tokens[3], tuple): + destination, direction = tokens[3] + else: + destination = tokens[3] + direction = "+-" + + return limit_variable, destination, direction + + def limit(self, tokens): + limit_variable, destination, direction = tokens[2] + + return sympy.Limit(tokens[-1], limit_variable, destination, direction) + + def differential(self, tokens): + return tokens[1] + + def derivative(self, tokens): + return sympy.Derivative(tokens[-1], tokens[5]) + + def list_of_expressions(self, tokens): + if len(tokens) == 1: + # we return it verbatim because the function_applied node expects + # a list + return tokens + else: + def remove_tokens(args): + if isinstance(args, Token): + if args.type != "COMMA": + # An unexpected token was encountered + raise LaTeXParsingError("A comma token was expected, but some other token was encountered.") + return False + return True + + return filter(remove_tokens, tokens) + + def function_applied(self, tokens): + return sympy.Function(tokens[0])(*tokens[2]) + + def min(self, tokens): + return sympy.Min(*tokens[2]) + + def max(self, tokens): + return sympy.Max(*tokens[2]) + + def bra(self, tokens): + from sympy.physics.quantum import Bra + return Bra(tokens[1]) + + def ket(self, tokens): + from sympy.physics.quantum import Ket + return Ket(tokens[1]) + + def inner_product(self, tokens): + from sympy.physics.quantum import Bra, Ket, InnerProduct + return 
InnerProduct(Bra(tokens[1]), Ket(tokens[3])) + + def sin(self, tokens): + return sympy.sin(tokens[1]) + + def cos(self, tokens): + return sympy.cos(tokens[1]) + + def tan(self, tokens): + return sympy.tan(tokens[1]) + + def csc(self, tokens): + return sympy.csc(tokens[1]) + + def sec(self, tokens): + return sympy.sec(tokens[1]) + + def cot(self, tokens): + return sympy.cot(tokens[1]) + + def sin_power(self, tokens): + exponent = tokens[2] + if exponent == -1: + return sympy.asin(tokens[-1]) + else: + return sympy.Pow(sympy.sin(tokens[-1]), exponent) + + def cos_power(self, tokens): + exponent = tokens[2] + if exponent == -1: + return sympy.acos(tokens[-1]) + else: + return sympy.Pow(sympy.cos(tokens[-1]), exponent) + + def tan_power(self, tokens): + exponent = tokens[2] + if exponent == -1: + return sympy.atan(tokens[-1]) + else: + return sympy.Pow(sympy.tan(tokens[-1]), exponent) + + def csc_power(self, tokens): + exponent = tokens[2] + if exponent == -1: + return sympy.acsc(tokens[-1]) + else: + return sympy.Pow(sympy.csc(tokens[-1]), exponent) + + def sec_power(self, tokens): + exponent = tokens[2] + if exponent == -1: + return sympy.asec(tokens[-1]) + else: + return sympy.Pow(sympy.sec(tokens[-1]), exponent) + + def cot_power(self, tokens): + exponent = tokens[2] + if exponent == -1: + return sympy.acot(tokens[-1]) + else: + return sympy.Pow(sympy.cot(tokens[-1]), exponent) + + def arcsin(self, tokens): + return sympy.asin(tokens[1]) + + def arccos(self, tokens): + return sympy.acos(tokens[1]) + + def arctan(self, tokens): + return sympy.atan(tokens[1]) + + def arccsc(self, tokens): + return sympy.acsc(tokens[1]) + + def arcsec(self, tokens): + return sympy.asec(tokens[1]) + + def arccot(self, tokens): + return sympy.acot(tokens[1]) + + def sinh(self, tokens): + return sympy.sinh(tokens[1]) + + def cosh(self, tokens): + return sympy.cosh(tokens[1]) + + def tanh(self, tokens): + return sympy.tanh(tokens[1]) + + def asinh(self, tokens): + return 
sympy.asinh(tokens[1]) + + def acosh(self, tokens): + return sympy.acosh(tokens[1]) + + def atanh(self, tokens): + return sympy.atanh(tokens[1]) + + def abs(self, tokens): + return sympy.Abs(tokens[1]) + + def floor(self, tokens): + return sympy.floor(tokens[1]) + + def ceil(self, tokens): + return sympy.ceiling(tokens[1]) + + def factorial(self, tokens): + return sympy.factorial(tokens[0]) + + def conjugate(self, tokens): + return sympy.conjugate(tokens[1]) + + def square_root(self, tokens): + if len(tokens) == 2: + # then there was no square bracket argument + return sympy.sqrt(tokens[1]) + elif len(tokens) == 3: + # then there _was_ a square bracket argument + return sympy.root(tokens[2], tokens[1]) + + def exponential(self, tokens): + return sympy.exp(tokens[1]) + + def log(self, tokens): + if tokens[0].type == "FUNC_LG": + # we don't need to check if there's an underscore or not because having one + # in this case would be meaningless + # TODO: ANTLR refers to ISO 80000-2:2019. should we keep base 10 or base 2? 
+ return sympy.log(tokens[1], 10) + elif tokens[0].type == "FUNC_LN": + return sympy.log(tokens[1]) + elif tokens[0].type == "FUNC_LOG": + # we check if a base was specified or not + if "_" in tokens: + # then a base was specified + return sympy.log(tokens[3], tokens[2]) + else: + # a base was not specified + return sympy.log(tokens[1]) + + def _extract_differential_symbol(self, s: str): + differential_symbols = {"d", r"\text{d}", r"\mathrm{d}"} + + differential_symbol = next((symbol for symbol in differential_symbols if symbol in s), None) + + return differential_symbol diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/__init__.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2b47d7552f57d6a9b6c6000ead3e33a2336a09d8 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_autolev.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_autolev.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..caccc636246cdd6af8668a3a545821aadc02f1cc Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_autolev.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_c_parser.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_c_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..57ab6b543cbe2d2711cc48988dce36d0e6896ed4 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_c_parser.cpython-310.pyc differ diff --git 
a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_fortran_parser.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_fortran_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2bcc67481f49b9939e92096ee8a1ab09962cd2f3 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_fortran_parser.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_implicit_multiplication_application.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_implicit_multiplication_application.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..faeed3be41794a828cfa610b282a87b3ad4fa90c Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_implicit_multiplication_application.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_latex.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_latex.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cb782639c694d84fd7b6cb763f0950d7488a303a Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_latex.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_latex_deps.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_latex_deps.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4b414f0d4ee2907601cdcb6bb2f75e1a41002d84 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_latex_deps.cpython-310.pyc differ diff --git 
a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_mathematica.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_mathematica.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..525b8428aee741ad6a550da122c88d1c19a18aba Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_mathematica.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_sym_expr.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_sym_expr.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..19948f6d23a722ac61a25b60db408e2fd3b0dbbb Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_sym_expr.cpython-310.pyc differ diff --git a/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_sympy_parser.cpython-310.pyc b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_sympy_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b20b58cc114052bccba56992e26faf8bf99fc4d6 Binary files /dev/null and b/deepseekvl2/lib/python3.10/site-packages/sympy/parsing/tests/__pycache__/test_sympy_parser.cpython-310.pyc differ