ZTWHHH committed on
Commit
b4b665d
·
verified ·
1 Parent(s): 86efc46

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. deepseek/lib/python3.10/site-packages/httpx/__pycache__/__init__.cpython-310.pyc +0 -0
  3. deepseek/lib/python3.10/site-packages/httpx/__pycache__/__version__.cpython-310.pyc +0 -0
  4. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_api.cpython-310.pyc +0 -0
  5. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_auth.cpython-310.pyc +0 -0
  6. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_client.cpython-310.pyc +0 -0
  7. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_config.cpython-310.pyc +0 -0
  8. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_content.cpython-310.pyc +0 -0
  9. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_decoders.cpython-310.pyc +0 -0
  10. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_exceptions.cpython-310.pyc +0 -0
  11. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_main.cpython-310.pyc +0 -0
  12. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc +0 -0
  13. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_multipart.cpython-310.pyc +0 -0
  14. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc +0 -0
  15. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_types.cpython-310.pyc +0 -0
  16. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_urlparse.cpython-310.pyc +0 -0
  17. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_urls.cpython-310.pyc +0 -0
  18. deepseek/lib/python3.10/site-packages/httpx/__pycache__/_utils.cpython-310.pyc +0 -0
  19. deepseek/lib/python3.10/site-packages/httpx/_transports/__pycache__/asgi.cpython-310.pyc +0 -0
  20. deepseek/lib/python3.10/site-packages/httpx/_transports/__pycache__/mock.cpython-310.pyc +0 -0
  21. deepseek/lib/python3.10/site-packages/httpx/_transports/base.py +86 -0
  22. deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/INSTALLER +1 -0
  23. deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/LICENSE +202 -0
  24. deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/METADATA +133 -0
  25. deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/REQUESTED +0 -0
  26. deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/WHEEL +5 -0
  27. deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/top_level.txt +1 -0
  28. deepseek/lib/python3.10/site-packages/lmformatenforcer/analyzer.py +77 -0
  29. deepseek/lib/python3.10/site-packages/lmformatenforcer/characterlevelparser.py +187 -0
  30. deepseek/lib/python3.10/site-packages/lmformatenforcer/external/__init__.py +0 -0
  31. deepseek/lib/python3.10/site-packages/lmformatenforcer/external/__pycache__/jsonschemaobjectutil.cpython-310.pyc +0 -0
  32. deepseek/lib/python3.10/site-packages/lmformatenforcer/external/jsonschemaobjectutil.py +231 -0
  33. deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/llamacpp.cpython-310.pyc +0 -0
  34. deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/transformers.cpython-310.pyc +0 -0
  35. deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/llamacpp.py +74 -0
  36. deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/transformers.py +146 -0
  37. deepseek/lib/python3.10/site-packages/lmformatenforcer/regexparser.py +85 -0
  38. deepseek/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE.APACHE +177 -0
  39. deepseek/lib/python3.10/site-packages/packaging-24.2.dist-info/METADATA +102 -0
  40. deepseek/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc +3 -0
  41. deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/__init__.cpython-310.pyc +0 -0
  42. deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/ast_parser.cpython-310.pyc +0 -0
  43. deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/mathematica.cpython-310.pyc +0 -0
  44. deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/maxima.cpython-310.pyc +0 -0
  45. deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/sym_expr.cpython-310.pyc +0 -0
  46. deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/sympy_parser.cpython-310.pyc +0 -0
  47. deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/LICENSE.txt +21 -0
  48. deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/LaTeX.g4 +312 -0
  49. deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__init__.py +66 -0
  50. deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/__init__.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -676,3 +676,4 @@ evalkit_tf437/lib/python3.10/site-packages/scipy/optimize/_highs/_highs_wrapper.
676
  deepseekvl2/lib/python3.10/site-packages/transformers/models/seamless_m4t_v2/__pycache__/modeling_seamless_m4t_v2.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
677
  deepseekvl2/lib/python3.10/site-packages/transformers/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
678
  deepseekvl2/lib/python3.10/site-packages/transformers/__pycache__/modeling_outputs.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
 
 
676
  deepseekvl2/lib/python3.10/site-packages/transformers/models/seamless_m4t_v2/__pycache__/modeling_seamless_m4t_v2.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
677
  deepseekvl2/lib/python3.10/site-packages/transformers/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
678
  deepseekvl2/lib/python3.10/site-packages/transformers/__pycache__/modeling_outputs.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
679
+ deepseek/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
deepseek/lib/python3.10/site-packages/httpx/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.87 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/__version__.cpython-310.pyc ADDED
Binary file (277 Bytes). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_api.cpython-310.pyc ADDED
Binary file (7.65 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_auth.cpython-310.pyc ADDED
Binary file (10.6 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_client.cpython-310.pyc ADDED
Binary file (41.5 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_config.cpython-310.pyc ADDED
Binary file (7.1 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_content.cpython-310.pyc ADDED
Binary file (7.3 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_decoders.cpython-310.pyc ADDED
Binary file (10.8 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_exceptions.cpython-310.pyc ADDED
Binary file (10.4 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_main.cpython-310.pyc ADDED
Binary file (12.4 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc ADDED
Binary file (39.1 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_multipart.cpython-310.pyc ADDED
Binary file (8.97 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc ADDED
Binary file (6.3 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_types.cpython-310.pyc ADDED
Binary file (2.84 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_urlparse.cpython-310.pyc ADDED
Binary file (11 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_urls.cpython-310.pyc ADDED
Binary file (21.8 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/__pycache__/_utils.cpython-310.pyc ADDED
Binary file (6.7 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/_transports/__pycache__/asgi.cpython-310.pyc ADDED
Binary file (5.16 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/_transports/__pycache__/mock.cpython-310.pyc ADDED
Binary file (1.41 kB). View file
 
deepseek/lib/python3.10/site-packages/httpx/_transports/base.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+ from types import TracebackType
5
+
6
+ from .._models import Request, Response
7
+
8
+ T = typing.TypeVar("T", bound="BaseTransport")
9
+ A = typing.TypeVar("A", bound="AsyncBaseTransport")
10
+
11
+ __all__ = ["AsyncBaseTransport", "BaseTransport"]
12
+
13
+
14
class BaseTransport:
    """Base class for synchronous HTTP transports.

    Concrete transports override ``handle_request`` (and usually ``close``);
    the context-manager protocol guarantees ``close`` runs on exit.
    """

    def __enter__(self: T) -> T:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        # Always release resources on exit, whether or not an exception occurred.
        self.close()

    def handle_request(self, request: Request) -> Response:
        """
        Send one HTTP request and return its response.

        This is the low-level hook that concrete transports override; most
        code should go through the higher-level ``Client`` API rather than
        calling it directly.

        To release network resources correctly, either consume the response
        stream right away (``response.stream.read()``) or follow the call
        with a try/finally block that guarantees the stream is closed.

        Example usage:

            with httpx.HTTPTransport() as transport:
                req = httpx.Request(
                    method=b"GET",
                    url=(b"https", b"www.example.com", 443, b"/"),
                    headers=[(b"Host", b"www.example.com")],
                )
                resp = transport.handle_request(req)
                body = resp.stream.read()
                print(resp.status_code, resp.headers, body)

        Accepts a single ``Request`` argument and returns a ``Response``.
        """
        message = "The 'handle_request' method must be implemented."
        raise NotImplementedError(message)  # pragma: no cover

    def close(self) -> None:
        # No-op by default; concrete transports override to tear down
        # connection pools and sockets.
        pass
63
+
64
+
65
class AsyncBaseTransport:
    """Async counterpart of ``BaseTransport``.

    Concrete transports override ``handle_async_request`` (and usually
    ``aclose``); the async context-manager protocol guarantees ``aclose``
    runs on exit.
    """

    async def __aenter__(self: A) -> A:
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        # Mirror the sync transport: always release resources on exit.
        await self.aclose()

    async def handle_async_request(
        self,
        request: Request,
    ) -> Response:
        """Send one HTTP request asynchronously and return its response.

        Low-level hook overridden by concrete async transports; callers
        normally go through the higher-level ``AsyncClient`` API.
        """
        message = "The 'handle_async_request' method must be implemented."
        raise NotImplementedError(message)  # pragma: no cover

    async def aclose(self) -> None:
        # No-op by default; concrete transports override for cleanup.
        pass
deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/LICENSE ADDED
@@ -0,0 +1,202 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ Apache License
3
+ Version 2.0, January 2004
4
+ http://www.apache.org/licenses/
5
+
6
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7
+
8
+ 1. Definitions.
9
+
10
+ "License" shall mean the terms and conditions for use, reproduction,
11
+ and distribution as defined by Sections 1 through 9 of this document.
12
+
13
+ "Licensor" shall mean the copyright owner or entity authorized by
14
+ the copyright owner that is granting the License.
15
+
16
+ "Legal Entity" shall mean the union of the acting entity and all
17
+ other entities that control, are controlled by, or are under common
18
+ control with that entity. For the purposes of this definition,
19
+ "control" means (i) the power, direct or indirect, to cause the
20
+ direction or management of such entity, whether by contract or
21
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
22
+ outstanding shares, or (iii) beneficial ownership of such entity.
23
+
24
+ "You" (or "Your") shall mean an individual or Legal Entity
25
+ exercising permissions granted by this License.
26
+
27
+ "Source" form shall mean the preferred form for making modifications,
28
+ including but not limited to software source code, documentation
29
+ source, and configuration files.
30
+
31
+ "Object" form shall mean any form resulting from mechanical
32
+ transformation or translation of a Source form, including but
33
+ not limited to compiled object code, generated documentation,
34
+ and conversions to other media types.
35
+
36
+ "Work" shall mean the work of authorship, whether in Source or
37
+ Object form, made available under the License, as indicated by a
38
+ copyright notice that is included in or attached to the work
39
+ (an example is provided in the Appendix below).
40
+
41
+ "Derivative Works" shall mean any work, whether in Source or Object
42
+ form, that is based on (or derived from) the Work and for which the
43
+ editorial revisions, annotations, elaborations, or other modifications
44
+ represent, as a whole, an original work of authorship. For the purposes
45
+ of this License, Derivative Works shall not include works that remain
46
+ separable from, or merely link (or bind by name) to the interfaces of,
47
+ the Work and Derivative Works thereof.
48
+
49
+ "Contribution" shall mean any work of authorship, including
50
+ the original version of the Work and any modifications or additions
51
+ to that Work or Derivative Works thereof, that is intentionally
52
+ submitted to Licensor for inclusion in the Work by the copyright owner
53
+ or by an individual or Legal Entity authorized to submit on behalf of
54
+ the copyright owner. For the purposes of this definition, "submitted"
55
+ means any form of electronic, verbal, or written communication sent
56
+ to the Licensor or its representatives, including but not limited to
57
+ communication on electronic mailing lists, source code control systems,
58
+ and issue tracking systems that are managed by, or on behalf of, the
59
+ Licensor for the purpose of discussing and improving the Work, but
60
+ excluding communication that is conspicuously marked or otherwise
61
+ designated in writing by the copyright owner as "Not a Contribution."
62
+
63
+ "Contributor" shall mean Licensor and any individual or Legal Entity
64
+ on behalf of whom a Contribution has been received by Licensor and
65
+ subsequently incorporated within the Work.
66
+
67
+ 2. Grant of Copyright License. Subject to the terms and conditions of
68
+ this License, each Contributor hereby grants to You a perpetual,
69
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70
+ copyright license to reproduce, prepare Derivative Works of,
71
+ publicly display, publicly perform, sublicense, and distribute the
72
+ Work and such Derivative Works in Source or Object form.
73
+
74
+ 3. Grant of Patent License. Subject to the terms and conditions of
75
+ this License, each Contributor hereby grants to You a perpetual,
76
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77
+ (except as stated in this section) patent license to make, have made,
78
+ use, offer to sell, sell, import, and otherwise transfer the Work,
79
+ where such license applies only to those patent claims licensable
80
+ by such Contributor that are necessarily infringed by their
81
+ Contribution(s) alone or by combination of their Contribution(s)
82
+ with the Work to which such Contribution(s) was submitted. If You
83
+ institute patent litigation against any entity (including a
84
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
85
+ or a Contribution incorporated within the Work constitutes direct
86
+ or contributory patent infringement, then any patent licenses
87
+ granted to You under this License for that Work shall terminate
88
+ as of the date such litigation is filed.
89
+
90
+ 4. Redistribution. You may reproduce and distribute copies of the
91
+ Work or Derivative Works thereof in any medium, with or without
92
+ modifications, and in Source or Object form, provided that You
93
+ meet the following conditions:
94
+
95
+ (a) You must give any other recipients of the Work or
96
+ Derivative Works a copy of this License; and
97
+
98
+ (b) You must cause any modified files to carry prominent notices
99
+ stating that You changed the files; and
100
+
101
+ (c) You must retain, in the Source form of any Derivative Works
102
+ that You distribute, all copyright, patent, trademark, and
103
+ attribution notices from the Source form of the Work,
104
+ excluding those notices that do not pertain to any part of
105
+ the Derivative Works; and
106
+
107
+ (d) If the Work includes a "NOTICE" text file as part of its
108
+ distribution, then any Derivative Works that You distribute must
109
+ include a readable copy of the attribution notices contained
110
+ within such NOTICE file, excluding those notices that do not
111
+ pertain to any part of the Derivative Works, in at least one
112
+ of the following places: within a NOTICE text file distributed
113
+ as part of the Derivative Works; within the Source form or
114
+ documentation, if provided along with the Derivative Works; or,
115
+ within a display generated by the Derivative Works, if and
116
+ wherever such third-party notices normally appear. The contents
117
+ of the NOTICE file are for informational purposes only and
118
+ do not modify the License. You may add Your own attribution
119
+ notices within Derivative Works that You distribute, alongside
120
+ or as an addendum to the NOTICE text from the Work, provided
121
+ that such additional attribution notices cannot be construed
122
+ as modifying the License.
123
+
124
+ You may add Your own copyright statement to Your modifications and
125
+ may provide additional or different license terms and conditions
126
+ for use, reproduction, or distribution of Your modifications, or
127
+ for any such Derivative Works as a whole, provided Your use,
128
+ reproduction, and distribution of the Work otherwise complies with
129
+ the conditions stated in this License.
130
+
131
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
132
+ any Contribution intentionally submitted for inclusion in the Work
133
+ by You to the Licensor shall be under the terms and conditions of
134
+ this License, without any additional terms or conditions.
135
+ Notwithstanding the above, nothing herein shall supersede or modify
136
+ the terms of any separate license agreement you may have executed
137
+ with Licensor regarding such Contributions.
138
+
139
+ 6. Trademarks. This License does not grant permission to use the trade
140
+ names, trademarks, service marks, or product names of the Licensor,
141
+ except as required for reasonable and customary use in describing the
142
+ origin of the Work and reproducing the content of the NOTICE file.
143
+
144
+ 7. Disclaimer of Warranty. Unless required by applicable law or
145
+ agreed to in writing, Licensor provides the Work (and each
146
+ Contributor provides its Contributions) on an "AS IS" BASIS,
147
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148
+ implied, including, without limitation, any warranties or conditions
149
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150
+ PARTICULAR PURPOSE. You are solely responsible for determining the
151
+ appropriateness of using or redistributing the Work and assume any
152
+ risks associated with Your exercise of permissions under this License.
153
+
154
+ 8. Limitation of Liability. In no event and under no legal theory,
155
+ whether in tort (including negligence), contract, or otherwise,
156
+ unless required by applicable law (such as deliberate and grossly
157
+ negligent acts) or agreed to in writing, shall any Contributor be
158
+ liable to You for damages, including any direct, indirect, special,
159
+ incidental, or consequential damages of any character arising as a
160
+ result of this License or out of the use or inability to use the
161
+ Work (including but not limited to damages for loss of goodwill,
162
+ work stoppage, computer failure or malfunction, or any and all
163
+ other commercial damages or losses), even if such Contributor
164
+ has been advised of the possibility of such damages.
165
+
166
+ 9. Accepting Warranty or Additional Liability. While redistributing
167
+ the Work or Derivative Works thereof, You may choose to offer,
168
+ and charge a fee for, acceptance of support, warranty, indemnity,
169
+ or other liability obligations and/or rights consistent with this
170
+ License. However, in accepting such obligations, You may act only
171
+ on Your own behalf and on Your sole responsibility, not on behalf
172
+ of any other Contributor, and only if You agree to indemnify,
173
+ defend, and hold each Contributor harmless for any liability
174
+ incurred by, or claims asserted against, such Contributor by reason
175
+ of your accepting any such warranty or additional liability.
176
+
177
+ END OF TERMS AND CONDITIONS
178
+
179
+ APPENDIX: How to apply the Apache License to your work.
180
+
181
+ To apply the Apache License to your work, attach the following
182
+ boilerplate notice, with the fields enclosed by brackets "[]"
183
+ replaced with your own identifying information. (Don't include
184
+ the brackets!) The text should be enclosed in the appropriate
185
+ comment syntax for the file format. We also recommend that a
186
+ file or class name and description of purpose be included on the
187
+ same "printed page" as the copyright notice for easier
188
+ identification within third-party archives.
189
+
190
+ Copyright [yyyy] [name of copyright owner]
191
+
192
+ Licensed under the Apache License, Version 2.0 (the "License");
193
+ you may not use this file except in compliance with the License.
194
+ You may obtain a copy of the License at
195
+
196
+ http://www.apache.org/licenses/LICENSE-2.0
197
+
198
+ Unless required by applicable law or agreed to in writing, software
199
+ distributed under the License is distributed on an "AS IS" BASIS,
200
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201
+ See the License for the specific language governing permissions and
202
+ limitations under the License.
deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/METADATA ADDED
@@ -0,0 +1,133 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: importlib_metadata
3
+ Version: 8.5.0
4
+ Summary: Read metadata from Python packages
5
+ Author-email: "Jason R. Coombs" <jaraco@jaraco.com>
6
+ Project-URL: Source, https://github.com/python/importlib_metadata
7
+ Classifier: Development Status :: 5 - Production/Stable
8
+ Classifier: Intended Audience :: Developers
9
+ Classifier: License :: OSI Approved :: Apache Software License
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: Programming Language :: Python :: 3 :: Only
12
+ Requires-Python: >=3.8
13
+ Description-Content-Type: text/x-rst
14
+ License-File: LICENSE
15
+ Requires-Dist: zipp >=3.20
16
+ Requires-Dist: typing-extensions >=3.6.4 ; python_version < "3.8"
17
+ Provides-Extra: check
18
+ Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'check'
19
+ Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'check'
20
+ Provides-Extra: cover
21
+ Requires-Dist: pytest-cov ; extra == 'cover'
22
+ Provides-Extra: doc
23
+ Requires-Dist: sphinx >=3.5 ; extra == 'doc'
24
+ Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
25
+ Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
26
+ Requires-Dist: furo ; extra == 'doc'
27
+ Requires-Dist: sphinx-lint ; extra == 'doc'
28
+ Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
29
+ Provides-Extra: enabler
30
+ Requires-Dist: pytest-enabler >=2.2 ; extra == 'enabler'
31
+ Provides-Extra: perf
32
+ Requires-Dist: ipython ; extra == 'perf'
33
+ Provides-Extra: test
34
+ Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
35
+ Requires-Dist: packaging ; extra == 'test'
36
+ Requires-Dist: pyfakefs ; extra == 'test'
37
+ Requires-Dist: flufl.flake8 ; extra == 'test'
38
+ Requires-Dist: pytest-perf >=0.9.2 ; extra == 'test'
39
+ Requires-Dist: jaraco.test >=5.4 ; extra == 'test'
40
+ Requires-Dist: importlib-resources >=1.3 ; (python_version < "3.9") and extra == 'test'
41
+ Provides-Extra: type
42
+ Requires-Dist: pytest-mypy ; extra == 'type'
43
+
44
+ .. image:: https://img.shields.io/pypi/v/importlib_metadata.svg
45
+ :target: https://pypi.org/project/importlib_metadata
46
+
47
+ .. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg
48
+
49
+ .. image:: https://github.com/python/importlib_metadata/actions/workflows/main.yml/badge.svg
50
+ :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22
51
+ :alt: tests
52
+
53
+ .. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
54
+ :target: https://github.com/astral-sh/ruff
55
+ :alt: Ruff
56
+
57
+ .. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest
58
+ :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest
59
+
60
+ .. image:: https://img.shields.io/badge/skeleton-2024-informational
61
+ :target: https://blog.jaraco.com/skeleton
62
+
63
+ .. image:: https://tidelift.com/badges/package/pypi/importlib-metadata
64
+ :target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme
65
+
66
+ Library to access the metadata for a Python package.
67
+
68
+ This package supplies third-party access to the functionality of
69
+ `importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_
70
+ including improvements added to subsequent Python versions.
71
+
72
+
73
+ Compatibility
74
+ =============
75
+
76
+ New features are introduced in this third-party library and later merged
77
+ into CPython. The following table indicates which versions of this library
78
+ were contributed to different versions in the standard library:
79
+
80
+ .. list-table::
81
+ :header-rows: 1
82
+
83
+ * - importlib_metadata
84
+ - stdlib
85
+ * - 7.0
86
+ - 3.13
87
+ * - 6.5
88
+ - 3.12
89
+ * - 4.13
90
+ - 3.11
91
+ * - 4.6
92
+ - 3.10
93
+ * - 1.4
94
+ - 3.8
95
+
96
+
97
+ Usage
98
+ =====
99
+
100
+ See the `online documentation <https://importlib-metadata.readthedocs.io/>`_
101
+ for usage details.
102
+
103
+ `Finder authors
104
+ <https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
105
+ also add support for custom package installers. See the above documentation
106
+ for details.
107
+
108
+
109
+ Caveats
110
+ =======
111
+
112
+ This project primarily supports third-party packages installed by PyPA
113
+ tools (or other conforming packages). It does not support:
114
+
115
+ - Packages in the stdlib.
116
+ - Packages installed without metadata.
117
+
118
+ Project details
119
+ ===============
120
+
121
+ * Project home: https://github.com/python/importlib_metadata
122
+ * Report bugs at: https://github.com/python/importlib_metadata/issues
123
+ * Code hosting: https://github.com/python/importlib_metadata
124
+ * Documentation: https://importlib-metadata.readthedocs.io/
125
+
126
+ For Enterprise
127
+ ==============
128
+
129
+ Available as part of the Tidelift Subscription.
130
+
131
+ This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
132
+
133
+ `Learn more <https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=referral&utm_campaign=github>`_.
deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/REQUESTED ADDED
File without changes
deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (74.1.2)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
deepseek/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ importlib_metadata
deepseek/lib/python3.10/site-packages/lmformatenforcer/analyzer.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Dict, Hashable, List
2
# numpy is an optional dependency: the analyzer only works when it is
# installed. On import failure we define a stub class with the same public
# method names and then re-raise a friendlier ImportError chained to the
# original one.
# NOTE(review): because the raise happens immediately after the class
# definition, the stub is never reachable through a normal import —
# presumably it exists only to satisfy static type checkers; confirm.
try:
    import numpy as np
    import numpy.typing as npt
except ImportError as e:
    class FormatEnforcerAnalyzer:  # type: ignore
        # Inert placeholder mirroring the real class's public interface.
        def __init__(self, *args, **kwargs):
            pass
        def report_raw_logits(self, *args, **kwargs):
            pass
        def generate_report_dict(self, *args, **kwargs):
            return {}
    raise ImportError('FormatEnforcerAnalyzer not available because numpy is not installed. Please install it with "pip install numpy"') from e
14
+
15
+ from . import TokenEnforcer
16
+
17
class FormatEnforcerAnalyzer:
    """A helper class to help analyze the format enforcer's behavior."""

    def __init__(self, token_enforcer: TokenEnforcer):
        self.token_enforcer = token_enforcer
        # Maps a tuple of output token ids (the generation prefix) to the raw
        # logits array that was produced for that prefix.
        self.raw_logits: Dict[Hashable, npt.ArrayLike] = {}

    def report_raw_logits(self, output_tokens: List[int], logits: npt.ArrayLike):
        """Report what logits were generated for a specific token sequence. The logits must be before any processing / filtering."""
        self.raw_logits[tuple(output_tokens)] = logits

    def generate_report_dict(self, output_tokens: List[int]) -> dict:
        """Generate a report dict containing the analysis results for a specific output token sequence."""
        collected_scores: List[npt.ArrayLike] = []
        allowed_per_step: List[List[int]] = []
        # Gather, for every recorded prefix of the output, the raw logits and
        # the token ids the enforcer would have allowed at that step.
        for step in range(len(output_tokens)):
            prefix = output_tokens[:step]
            key = tuple(prefix)
            if key in self.raw_logits:
                collected_scores.append(self.raw_logits[key])
                allowed_per_step.append(self.token_enforcer.get_allowed_tokens(prefix))

        logits = np.array(collected_scores)  # n_tokens * vocab_size
        probabilities = _softmax(logits)  # n_tokens * vocab_size
        # "Leading" = what the model would have picked without enforcement.
        leading_indices = probabilities.argmax(axis=1)  # n_tokens
        leading_scores = _select_array(probabilities, leading_indices)  # n_tokens

        token_strings: Dict[int, str] = {
            token_id: token_str
            for token_id, token_str, _ in self.token_enforcer.regular_tokens
        }

        def decode_single(token_id: int) -> str:
            # Prefer the precomputed single-token string; fall back to the
            # enforcer's decoder for ids not in the regular-token table.
            if token_id in token_strings:
                return token_strings[token_id]
            return self.token_enforcer.decoder([token_id])

        leading_tokens = [decode_single(idx) for idx in leading_indices]

        # Mask out disallowed tokens with -inf so argmax only considers the
        # tokens the enforcer permitted at each step.
        mask = np.full_like(probabilities, -np.inf)
        for step in range(mask.shape[0]):
            mask[step][allowed_per_step[step]] = 0
        enforced_probabilities = probabilities + mask

        generated_indices = enforced_probabilities.argmax(axis=1)
        generated_scores = _select_array(enforced_probabilities, generated_indices)
        generated_tokens = [decode_single(idx) for idx in generated_indices]

        # In order to minimize the package's dependencies, we don't create a
        # dataframe, but create a dataframe-like dictionary instead.
        return {
            'generated_token': generated_tokens,
            'generated_token_idx': generated_indices.tolist(),
            'generated_score': generated_scores.tolist(),
            'leading_token': leading_tokens,
            'leading_token_idx': leading_indices.tolist(),
            'leading_score': leading_scores.tolist(),
        }
69
+
70
+ def _softmax(arr: np.ndarray) -> np.ndarray:
71
+ """Compute softmax values for each sets of scores in arr."""
72
+ e_arr = np.exp(arr)
73
+ return e_arr / np.sum(e_arr, axis=1, keepdims=True)
74
+
75
+ def _select_array(arr: np.ndarray, index_array: np.ndarray) -> np.ndarray:
76
+ # https://numpy.org/doc/stable/reference/generated/numpy.argmax.html
77
+ return np.take_along_axis(arr, np.expand_dims(index_array, axis=-1), axis=-1).squeeze(axis=-1)
deepseek/lib/python3.10/site-packages/lmformatenforcer/characterlevelparser.py ADDED
@@ -0,0 +1,187 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import abc
2
+ import os
3
+ from dataclasses import dataclass, field
4
+ from typing import Hashable, List, Optional, TypeVar
5
+ from .consts import (COMPLETE_ALPHABET, WHITESPACE_CHARACTERS, DEFAULT_MAX_CONSECUTIVE_WHITESPACES,
6
+ DEFAULT_FORCE_JSON_FIELD_ORDER, CONFIG_ENV_VAR_MAX_CONSECUTIVE_WHITESPACES,
7
+ CONFIG_ENV_VAR_STRICT_JSON_FIELD_ORDER, CONFIG_ENV_VAR_MAX_JSON_ARRAY_LENGTH,
8
+ DEFAULT_MAX_JSON_ARRAY_LENGTH)
9
+
10
+
11
+ def _parse_bool(s: str) -> bool:
12
+ return s and (s.strip().lower() in ['true', '1'])
13
+
14
+
15
def _env_or_default_field(env_var: str, default_val):
    """Create a dataclass field whose default is read from an environment variable.

    ``env_var`` is consulted lazily (at dataclass-instance creation) and the raw
    string is converted with the type of ``default_val``; booleans get dedicated
    parsing via ``_parse_bool`` since ``bool("False")`` would be truthy. When the
    variable is unset, ``default_val`` is used (round-tripped through ``str``).
    """
    if type(default_val) is bool:
        parser_func = _parse_bool
    else:
        parser_func = type(default_val)

    def factory_func():
        raw_value = os.environ.get(env_var, str(default_val))
        return parser_func(raw_value)

    return field(default_factory=factory_func)
21
+
22
+
23
@dataclass
class CharacterLevelParserConfig:
    """Tunable options shared by character-level parsers.

    Each env-backed field reads its default from an environment variable
    (see consts) at instance-creation time via _env_or_default_field.
    """

    # Characters treated as "any character" in free-text positions (e.g. when
    # RegexParser expands its wildcard transition).
    alphabet: str = COMPLETE_ALPHABET
    max_consecutive_whitespaces: int = _env_or_default_field(CONFIG_ENV_VAR_MAX_CONSECUTIVE_WHITESPACES,
                                                             DEFAULT_MAX_CONSECUTIVE_WHITESPACES)
    """How many consective whitespaces the JsonSchemaParser will allow"""
    force_json_field_order: bool = _env_or_default_field(CONFIG_ENV_VAR_STRICT_JSON_FIELD_ORDER,
                                                         DEFAULT_FORCE_JSON_FIELD_ORDER)
    """Whether the JsonSchemaParser will force fields to appear in the
    order of the 'required' field in the schema"""
    max_json_array_length: int = _env_or_default_field(CONFIG_ENV_VAR_MAX_JSON_ARRAY_LENGTH,
                                                       DEFAULT_MAX_JSON_ARRAY_LENGTH)
    """What is the maximum json array length if not specified by the schema. Helps the LLM
    avoid infinite loops."""
37
+
38
+
39
class CharacterLevelParser(abc.ABC):
    """Interface for parsers that consume a string one character at a time and
    report, at every step, which characters may legally appear next."""

    def __init__(self, config: Optional[CharacterLevelParserConfig] = None):
        # Fall back to a default configuration when none is supplied.
        self._config = config if config is not None else CharacterLevelParserConfig()

    @abc.abstractmethod
    def add_character(self, new_character: str) -> 'CharacterLevelParser':
        """Return a NEW parser representing the state after consuming
        new_character. This must be an immutable operation: the original
        parser (self) is never modified."""
        raise NotImplementedError()

    @abc.abstractmethod
    def get_allowed_characters(self) -> str:
        """Return every character legal at the current parse point, as one string."""
        raise NotImplementedError()

    @abc.abstractmethod
    def can_end(self) -> bool:
        """Return True if parsing could legally stop at the current state
        (the desired structure is potentially complete)."""
        raise NotImplementedError()

    def shortcut_key(self) -> Optional[Hashable]:
        """Optional hook: a key marking this as a repeating state where full
        tree traversal can be skipped. None disables the shortcut."""
        return None

    def cache_key(self) -> Optional[Hashable]:
        """Optional hook: a key marking this as a repeating state whose results
        may be cached on a revisit. None disables caching."""
        return None

    @property
    def config(self) -> CharacterLevelParserConfig:
        return self._config

    @config.setter
    def config(self, new_config: CharacterLevelParserConfig):
        self._config = new_config
        return self
77
+
78
+
79
class StringParser(CharacterLevelParser):
    """StringParser is an example CharacterLevelParser that only allows an exact string.
    It is a debugging / learning tool showing how CharacterLevelParser works together
    with TokenizerPrefixTree to filter the allowed tokens (some of whom may contain
    multiple characters). (Docstring previously mis-named this class "RegexParser".)
    """

    def __init__(self, string: str):
        # target_str holds the still-unmatched suffix of the target string.
        # Note: deliberately mirrors the original and does not call
        # super().__init__(), so this parser carries no config of its own.
        self.target_str = string

    def add_character(self, new_character: str) -> CharacterLevelParser:
        """Consume new_character and return a parser for the remaining suffix.

        Raises ValueError when the character does not match the expected
        prefix (including when the target is already fully consumed).
        """
        if self.target_str.startswith(new_character):
            return StringParser(self.target_str[len(new_character):])
        # Report the full remaining suffix: indexing target_str[0] here would
        # itself raise IndexError (not ValueError) once the target is exhausted.
        raise ValueError(f"Expected '{self.target_str}' but got '{new_character}'")

    def get_allowed_characters(self) -> str:
        # Only the single next expected character; nothing once exhausted.
        return self.target_str[0] if self.target_str else ""

    def can_end(self) -> bool:
        # Parsing may stop only when the whole target has been matched.
        return not self.target_str
96
+
97
+
98
class ForceStopParser(CharacterLevelParser):
    """A simple parser that forbids any characters except the stop token. Used to force stop LM operation"""

    def __init__(self, allow_whitespace: bool = False):
        # When set, trailing whitespace may still be emitted before stopping.
        self.allow_whitespace = allow_whitespace

    def add_character(self, new_character: str) -> CharacterLevelParser:
        # The state never changes: every accepted character leaves us forcing a stop.
        return self

    def get_allowed_characters(self) -> str:
        if self.allow_whitespace:
            return WHITESPACE_CHARACTERS
        return ""

    def can_end(self) -> bool:
        # Ending is always legal (and expected).
        return True
108
+
109
+
110
class UnionParser(CharacterLevelParser):
    """A parser that allows a string that would be allowed by any of several different parsers"""

    def __init__(self, parsers: List[CharacterLevelParser]):
        self.parsers = parsers

    def add_character(self, new_character: str) -> CharacterLevelParser:
        # Advance every child that accepts the character, drop the rest.
        # (This queries get_allowed_characters() a second time, a small perf hit.)
        advanced = [
            parser.add_character(new_character)
            for parser in self.parsers
            if new_character in parser.get_allowed_characters()
        ]
        if len(advanced) == 1:
            return advanced[0]
        return UnionParser(advanced)

    def get_allowed_characters(self) -> str:
        # Union of the children's allowed sets, deduplicated.
        combined = set()
        for parser in self.parsers:
            combined.update(parser.get_allowed_characters())
        return "".join(combined)

    def can_end(self) -> bool:
        # The union may end as soon as any alternative may end.
        return any(parser.can_end() for parser in self.parsers)

    def shortcut_key(self) -> Optional[Hashable]:
        # Only meaningful when every alternative agrees on the same key.
        keys = {parser.shortcut_key() for parser in self.parsers}
        return next(iter(keys)) if len(keys) == 1 else None

    def cache_key(self) -> Optional[Hashable]:
        # Cacheable only when every alternative state is itself cacheable.
        child_keys = tuple(parser.cache_key() for parser in self.parsers)
        if any(key is None for key in child_keys):
            return None
        return ('union', child_keys)
141
+
142
+
143
class SequenceParser(CharacterLevelParser):
    """A parser that is a sequence of multiple parsers."""
    def __init__(self, parsers: List[CharacterLevelParser]):
        # parsers[0] is the active parser; each later parser becomes reachable
        # only once all parsers before it can end.
        self.parsers = parsers

    def add_character(self, new_character: str) -> CharacterLevelParser:
        legal_parsers = []
        # Tricky edge case: if the first parser can both end and accept the character,
        # and the second parser can also accept, we don't know which scenario we are dealing
        # with, so we need to return a UnionParser.
        for idx, parser in enumerate(self.parsers):
            if new_character in parser.get_allowed_characters():
                updated_parser = parser.add_character(new_character)
                next_parsers = [updated_parser] + self.parsers[idx+1:]
                if len(next_parsers) == 1:
                    legal_parsers.append(next_parsers[0])
                else:
                    legal_parsers.append(SequenceParser(next_parsers))
            if not parser.can_end():
                # Later parsers are unreachable until this one can finish.
                break
        if len(legal_parsers) == 1:
            return legal_parsers[0]
        return UnionParser(legal_parsers)

    def get_allowed_characters(self) -> str:
        # Allowed set is the union over the longest prefix of parsers that can
        # all end; scanning stops at the first parser that cannot end yet.
        allowed_characters = set()
        for parser in self.parsers:
            allowed_characters.update(parser.get_allowed_characters())
            if not parser.can_end():
                break
        return "".join(allowed_characters)

    def can_end(self) -> bool:
        # The sequence may end only when every remaining parser may end.
        return all([parser.can_end() for parser in self.parsers])

    def shortcut_key(self) -> Optional[Hashable]:
        # Delegate only in the degenerate single-parser case.
        return self.parsers[0].shortcut_key() if len(self.parsers) == 1 else None

    def cache_key(self) -> Optional[Hashable]:
        # Cacheable only when every remaining parser state is cacheable.
        all_cache_keys = tuple(parser.cache_key() for parser in self.parsers)
        if all(key is not None for key in all_cache_keys):
            return ('sequence', all_cache_keys)
        return None
186
+
187
+
deepseek/lib/python3.10/site-packages/lmformatenforcer/external/__init__.py ADDED
File without changes
deepseek/lib/python3.10/site-packages/lmformatenforcer/external/__pycache__/jsonschemaobjectutil.cpython-310.pyc ADDED
Binary file (6.42 kB). View file
 
deepseek/lib/python3.10/site-packages/lmformatenforcer/external/jsonschemaobjectutil.py ADDED
@@ -0,0 +1,231 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # https://github.com/koxudaxi/datamodel-code-generator/blob/master/datamodel_code_generator/util.py
2
+ # MIT License
3
+
4
+ # Copyright (c) 2019 Koudai Aono
5
+
6
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
7
+ # of this software and associated documentation files (the "Software"), to deal
8
+ # in the Software without restriction, including without limitation the rights
9
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10
+ # copies of the Software, and to permit persons to whom the Software is
11
+ # furnished to do so, subject to the following conditions:
12
+ #
13
+ # The above copyright notice and this permission notice shall be included in all
14
+ # copies or substantial portions of the Software.
15
+
16
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22
+ # SOFTWARE.
23
+ from __future__ import annotations
24
+
25
+ from typing import TYPE_CHECKING, Any, Callable, TypeVar
26
+ from enum import Enum, auto
27
+
28
+ import pydantic
29
+ from packaging import version
30
+ from pydantic import BaseModel as _BaseModel
31
+
32
+ from typing import (
33
+ TYPE_CHECKING,
34
+ Any,
35
+ Callable,
36
+ Iterator,
37
+ TypeVar,
38
+ Union,
39
+ )
40
+
41
# --- Vendored pydantic v1/v2 compatibility shims (see file header license) ---

# pydantic.VERSION is normally a string; normalize defensively before parsing.
PYDANTIC_VERSION = version.parse(
    pydantic.VERSION if isinstance(pydantic.VERSION, str) else str(pydantic.VERSION)
)

# Treat anything from the 2.0 betas (2.0b3) onward as the v2 API surface.
PYDANTIC_V2: bool = PYDANTIC_VERSION >= version.parse('2.0b3')

if PYDANTIC_V2:
    from pydantic import GetCoreSchemaHandler
    from pydantic_core import core_schema

if TYPE_CHECKING:
    # Static-analysis-only aliases: give type checkers simple stand-ins
    # without importing optional backports at runtime.
    cached_property = property
    from yaml import SafeLoader

    Protocol = object
    runtime_checkable: Callable[..., Any]

    from typing_extensions import Literal
else:
    # Runtime fallbacks for older Pythons: prefer stdlib, then typing_extensions.
    try:
        from typing import Protocol
    except ImportError:
        from typing_extensions import Protocol  # noqa
    try:
        from typing import runtime_checkable
    except ImportError:
        from typing_extensions import runtime_checkable  # noqa
    # Prefer the C-accelerated YAML loader when libyaml is available.
    try:
        from yaml import CSafeLoader as SafeLoader
    except ImportError:  # pragma: no cover
        from yaml import SafeLoader

    try:
        from functools import cached_property
    except ImportError:
        _NOT_FOUND = object()

        # Minimal backport of functools.cached_property (Python < 3.8):
        # computes once, then stores the value in the instance __dict__.
        class cached_property:
            def __init__(self, func: Callable) -> None:
                self.func: Callable = func
                self.__doc__: Any = func.__doc__

            def __get__(self, instance: Any, owner: Any = None) -> Any:
                value = instance.__dict__.get(self.func.__name__, _NOT_FOUND)
                if value is _NOT_FOUND:  # pragma: no cover
                    value = instance.__dict__[self.func.__name__] = self.func(instance)
                return value


# Parse YAML timestamps with the plain-string constructor so datetime-looking
# values round-trip verbatim instead of becoming datetime objects.
SafeLoader.yaml_constructors[
    'tag:yaml.org,2002:timestamp'
] = SafeLoader.yaml_constructors['tag:yaml.org,2002:str']


# Type variable bound to pydantic models, used by the validator decorators below.
Model = TypeVar('Model', bound=_BaseModel)
96
+
97
+
98
def model_validator(
    mode: Literal['before', 'after'] = 'after',
) -> Callable[[Callable[[Model, Any], Any]], Callable[[Model, Any], Any]]:
    """Version-agnostic model validator decorator.

    Dispatches at decoration time to pydantic v2's ``model_validator`` or
    v1's ``root_validator`` (where mode='before' maps to ``pre=True``).
    """
    def inner(method: Callable[[Model, Any], Any]) -> Callable[[Model, Any], Any]:
        if PYDANTIC_V2:
            from pydantic import model_validator as model_validator_v2

            return model_validator_v2(mode=mode)(method)  # type: ignore
        else:
            from pydantic import root_validator

            return root_validator(method, pre=mode == 'before')  # type: ignore

    return inner
112
+
113
+
114
def field_validator(
    field_name: str,
    *fields: str,
    mode: Literal['before', 'after'] = 'after',
) -> Callable[[Any], Callable[[Model, Any], Any]]:
    """Version-agnostic field validator decorator.

    Dispatches at decoration time to pydantic v2's ``field_validator`` or
    v1's ``validator`` (where mode='before' maps to ``pre=True``).
    """
    def inner(method: Callable[[Model, Any], Any]) -> Callable[[Model, Any], Any]:
        if PYDANTIC_V2:
            from pydantic import field_validator as field_validator_v2

            return field_validator_v2(field_name, *fields, mode=mode)(method)  # type: ignore
        else:
            from pydantic import validator

            return validator(field_name, *fields, pre=mode == 'before')(method)  # type: ignore

    return inner
130
+
131
+
132
if PYDANTIC_V2:
    from pydantic import ConfigDict as ConfigDict
else:
    # Pydantic v1 has no ConfigDict; a plain dict keeps call sites uniform.
    ConfigDict = dict  # type: ignore
136
+
137
+
138
class BaseModel(_BaseModel):
    """Shared base model; on pydantic v2 it explicitly selects non-strict
    (coercing) validation via ``strict=False``."""
    if PYDANTIC_V2:
        model_config = ConfigDict(strict=False)
141
+
142
+
143
def is_url(ref: str) -> bool:
    """Return True when *ref* is an absolute http(s) URL reference."""
    url_prefixes = ('https://', 'http://')
    return ref.startswith(url_prefixes)
145
+
146
+
147
class Types(Enum):
    """Closed set of type/format identifiers (named after JSON Schema /
    OpenAPI 'type' and 'format' values; members are opaque markers — the
    auto() values themselves carry no meaning)."""
    integer = auto()
    int32 = auto()
    int64 = auto()
    number = auto()
    float = auto()
    double = auto()
    decimal = auto()
    time = auto()
    string = auto()
    byte = auto()
    binary = auto()
    date = auto()
    date_time = auto()
    password = auto()
    email = auto()
    uuid = auto()
    uuid1 = auto()
    uuid2 = auto()
    uuid3 = auto()
    uuid4 = auto()
    uuid5 = auto()
    uri = auto()
    hostname = auto()
    ipv4 = auto()
    ipv4_network = auto()
    ipv6 = auto()
    ipv6_network = auto()
    boolean = auto()
    object = auto()
    null = auto()
    array = auto()
    any = auto()
180
+
181
class UnionIntFloat:
    """Wrapper holding an int-or-float value with pydantic v1 and v2 hooks.

    Supports int()/float()/str() conversion; validation accepts an existing
    UnionIntFloat, an int, or a float, and rejects everything else.
    """

    def __init__(self, value: Union[int, float]) -> None:
        self.value: Union[int, float] = value

    def __int__(self) -> int:
        return int(self.value)

    def __float__(self) -> float:
        return float(self.value)

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def __get_validators__(cls) -> Iterator[Callable[[Any], Any]]:
        # Pydantic v1 hook: yields the validators to run for this type.
        yield cls.validate

    @classmethod
    def __get_pydantic_core_schema__(
        cls, _source_type: Any, _handler: GetCoreSchemaHandler
    ) -> core_schema.CoreSchema:
        # Pydantic v2 hook: accept int|float then wrap via cls.validate.
        from_int_schema = core_schema.chain_schema(
            [
                core_schema.union_schema(
                    [core_schema.int_schema(), core_schema.float_schema()]
                ),
                core_schema.no_info_plain_validator_function(cls.validate),
            ]
        )

        return core_schema.json_or_python_schema(
            json_schema=core_schema.no_info_plain_validator_function(cls.validate),
            python_schema=core_schema.union_schema(
                [
                    # check if it's an instance first before doing any further work
                    core_schema.is_instance_schema(UnionIntFloat),
                    from_int_schema,
                ]
            ),
            # Serialize back to the raw wrapped number.
            serialization=core_schema.plain_serializer_function_ser_schema(
                lambda instance: instance.value
            ),
        )

    @classmethod
    def validate(cls, v: Any) -> UnionIntFloat:
        """Coerce *v* into a UnionIntFloat; raises TypeError for other types."""
        if isinstance(v, UnionIntFloat):
            return v
        elif not isinstance(v, (int, float)):  # pragma: no cover
            raise TypeError(f'{v} is not int or float')
        return cls(v)
deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/llamacpp.cpython-310.pyc ADDED
Binary file (3.13 kB). View file
 
deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/__pycache__/transformers.cpython-310.pyc ADDED
Binary file (6.38 kB). View file
 
deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/llamacpp.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ try:
2
+ from llama_cpp import Llama, LogitsProcessor
3
+ except ImportError:
4
+ raise ImportError('llama-cpp-python is not installed. Please install it with "pip install llama-cpp-python"')
5
+ from lmformatenforcer import CharacterLevelParser, TokenEnforcer, FormatEnforcerAnalyzer, TokenEnforcerTokenizerData
6
+ import numpy as np
7
+ import numpy.typing as npt
8
+ from typing import Tuple, List, Union
9
+
10
def _build_regular_tokens_list(llm: Llama) -> List[Tuple[int, str, bool]]:
    """Build (token_id, decoded_string, is_word_start) triples for every
    non-special token in the model's vocabulary.

    Decoding is done with a '0' token prepended (and the first character of
    the result dropped) so that word-start tokens keep their leading space,
    which detokenizing the token on its own would strip.
    """
    token_0 = llm.tokenize(b"0")[-1]
    regular_tokens = []
    special_tokens = [llm.token_bos(), llm.token_eos()]
    for token_idx in range(llm.n_vocab()):
        if token_idx in special_tokens:
            continue
        # We prepend token 0 and skip the first letter of the result to get a space if the token is a start word.
        try:
            decoded_after_0 = llm.detokenize([token_0, token_idx]).decode('utf-8')[1:]
            decoded_regular = llm.detokenize([token_idx]).decode('utf-8')
            is_word_start_token = len(decoded_after_0) > len(decoded_regular)
            regular_tokens.append((token_idx, decoded_after_0, is_word_start_token))
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt / SystemExit
            # are no longer swallowed. This path is hit for cases such as raw
            # bytes outside of the ASCII range. We assign this a value of �,
            # which is what huggingface does for tokens that are meaningless on
            # their own. Allowing this in the json_freetext field will allow the
            # language model to build unicode sequences from multiple tokens
            # in JSON-freetext fields.
            regular_tokens.append((token_idx, '�', False))
    return regular_tokens
30
+
31
+
32
def build_token_enforcer_tokenizer_data(llm: Llama) -> TokenEnforcerTokenizerData:
    """Precompute the tokenizer data a TokenEnforcer needs for a llama.cpp
    model: the regular-token table, a robust decoder and the EOS token id."""
    regular_tokens = _build_regular_tokens_list(llm)

    def decoder_fn(sent: List[int]) -> str:
        # The tail of `sent` may be an incomplete UTF-8 sequence; retry with the
        # last token dropped until the remainder decodes cleanly.
        if not sent:
            # Base case: prevents infinite recursion if detokenize([]) ever fails.
            return ''
        try:
            return llm.detokenize(sent).decode('utf-8')
        except Exception:
            # Narrowed from a bare `except:` (don't swallow KeyboardInterrupt).
            return decoder_fn(sent[:-1])

    return TokenEnforcerTokenizerData(regular_tokens, decoder_fn, llm.token_eos())
42
+
43
+
44
class LlamaCppLogitsProcessor:
    """Logits processor that masks out every token the TokenEnforcer disallows.

    Optionally reports the raw logits to a FormatEnforcerAnalyzer for
    diagnostics. A vocabulary-sized boolean mask is allocated on the first
    call and reused afterwards to avoid per-step allocations.
    """

    def __init__(self, token_enforcer: TokenEnforcer, analyze):
        self.token_enforcer = token_enforcer
        self.analyzer = FormatEnforcerAnalyzer(token_enforcer) if analyze else None
        self.mask = None  # lazily allocated boolean mask, shaped like scores

    def __call__(self, input_ids: npt.NDArray[np.intc], scores: npt.NDArray[np.single]) -> npt.NDArray[np.single]:
        tokens_so_far = input_ids.tolist()
        if self.analyzer is not None:
            self.analyzer.report_raw_logits(tokens_so_far, scores.tolist())
        allowed = self.token_enforcer.get_allowed_tokens(tokens_so_far)
        if self.mask is None:
            self.mask = np.ones(scores.shape, bool)
        else:
            self.mask.fill(True)
        # True means "forbidden"; clear the allowed positions, then knock the
        # forbidden scores down to -inf (in place).
        self.mask[allowed] = False
        scores[self.mask] = float('-inf')
        return scores
62
+
63
+
64
def build_llamacpp_logits_processor(llm: Union[Llama, TokenEnforcerTokenizerData], character_level_parser: CharacterLevelParser, analyze: bool=False) -> LlamaCppLogitsProcessor:
    """Build the logits processor function that llama.cpp will use to filter the tokens generated by the model. The result
    can be passed in the logits_processor list that is sent to the call or generate() method of llama.cpp models."""
    # Accept either a raw Llama model (precompute its tokenizer data now) or
    # already-precomputed TokenEnforcerTokenizerData.
    tokenizer_data = build_token_enforcer_tokenizer_data(llm) if isinstance(llm, Llama) else llm
    enforcer = TokenEnforcer(tokenizer_data, character_level_parser)
    return LlamaCppLogitsProcessor(enforcer, analyze)
72
+
73
+
74
+ __all__ = ['build_llamacpp_logits_processor', 'build_token_enforcer_tokenizer_data']
deepseek/lib/python3.10/site-packages/lmformatenforcer/integrations/transformers.py ADDED
@@ -0,0 +1,146 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ from typing import Any, Callable, List, Optional, Tuple, Union
3
+ try:
4
+ from transformers import AutoModelForCausalLM
5
+ from transformers.generation.logits_process import LogitsWarper, PrefixConstrainedLogitsProcessor
6
+ from transformers.tokenization_utils import PreTrainedTokenizerBase
7
+ except ImportError:
8
+ raise ImportError('transformers is not installed. Please install it with "pip install transformers[torch]"')
9
+
10
+ try:
11
+ import torch
12
+ except ImportError:
13
+ raise ImportError('pytorch is not installed. See https://pytorch.org/get-started/locally/ for installation instructions."')
14
+
15
+ from ..characterlevelparser import CharacterLevelParser
16
+ from ..tokenenforcer import TokenEnforcer, TokenEnforcerTokenizerData
17
+ from ..analyzer import FormatEnforcerAnalyzer
18
+
19
class LogitsSaverWarper(LogitsWarper):
    """Pass-through LogitsWarper that records each generation step's raw
    logits into an analyzer without modifying the scores."""

    def __init__(self, analyzer: FormatEnforcerAnalyzer) -> None:
        self.analyzer = analyzer

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor) -> torch.FloatTensor:
        # Convert to plain Python lists once, then report per batch element.
        inputs_list = input_ids.tolist()
        scores_list = scores.tolist()
        for batch_inputs, batch_scores in zip(inputs_list, scores_list):
            self.analyzer.report_raw_logits(batch_inputs, batch_scores)
        return scores
29
+
30
class LogitsSaverManager:
    """Temporarily hooks a model's logits-warper pipeline so raw logits are
    reported to an analyzer (and optionally filtered) during generation.

    NOTE(review): relies on the private transformers API
    ``model._get_logits_warper`` — verify against the installed
    transformers version.
    """

    # The saver warper installed by replace_logits_warper (None until then).
    warper: LogitsSaverWarper

    def __init__(self, model: AutoModelForCausalLM, analyzer: FormatEnforcerAnalyzer):
        self.model = model
        self.warper = None
        self.old_warper = None  # original _get_logits_warper, kept for restore
        self.analyzer = analyzer

    def replace_logits_warper(self, filter_func = None):
        # Monkey-patch _get_logits_warper so every generate() call gets our
        # saver (and optional enforcement filter) prepended to the warper list.
        self.old_warper = self.model._get_logits_warper

        def new_logits_warper(generation_config):
            warpers = self.old_warper(generation_config)
            # Saver goes first so it observes the raw, pre-filter scores.
            self.warper = LogitsSaverWarper(self.analyzer)
            warpers.insert(0, self.warper)
            if filter_func is not None:
                # Enforcement filter runs immediately after the saver.
                processor = PrefixConstrainedLogitsProcessor(filter_func, 1)
                warpers.insert(1, processor)
            return warpers
        self.model._get_logits_warper = new_logits_warper

    def unreplace_logits_warper(self):
        # Restore the original hook; must be called even if generation fails.
        self.model._get_logits_warper = self.old_warper
54
+
55
+
56
def _build_regular_tokens_list(tokenizer: PreTrainedTokenizerBase, vocab_size: int) -> List[Tuple[int, str, bool]]:
    """Build (token_id, decoded_string, is_word_start) triples for every
    non-special token id below vocab_size.

    Each token is decoded with a '0' token prepended (and the first character
    of the result dropped) so word-start tokens keep their leading space,
    which decoding the token alone would strip.
    """
    zero_token = tokenizer.encode("0")[-1]
    special_ids = tokenizer.all_special_ids
    tokens = []
    for idx in range(vocab_size):
        if idx in special_ids:
            continue
        with_prefix = tokenizer.decode([zero_token, idx])[1:]
        alone = tokenizer.decode([idx])
        tokens.append((idx, with_prefix, len(with_prefix) > len(alone)))
    return tokens
68
+
69
+
70
def _decode_function(tokenizer: PreTrainedTokenizerBase, tokens: List[int]) -> str:
    """Decode *tokens* to text, stripping trailing replacement characters (�)
    that indicate an incomplete UTF-8 sequence at the end of the sequence."""
    return tokenizer.decode(tokens).rstrip('�')
74
+
75
+
76
def build_token_enforcer_tokenizer_data(tokenizer: PreTrainedTokenizerBase,
                                        vocab_size: Optional[int] = None) -> TokenEnforcerTokenizerData:
    """Precompute the data a TokenEnforcer needs from a transformers tokenizer:
    the regular-token table, a decode function and the EOS token id.
    vocab_size defaults to len(tokenizer) when unset (or falsy)."""
    effective_vocab_size = vocab_size or len(tokenizer)
    token_table = _build_regular_tokens_list(tokenizer, effective_vocab_size)
    decode_fn = functools.partial(_decode_function, tokenizer)
    return TokenEnforcerTokenizerData(token_table, decode_fn, tokenizer.eos_token_id)
82
+
83
+
84
class TransformersPrefixAllowedTokensFn:
    """Adapter exposing a TokenEnforcer as a transformers
    ``prefix_allowed_tokens_fn`` callable."""

    def __init__(self, token_enforcer: TokenEnforcer):
        self.token_enforcer = token_enforcer

    def __call__(self, batch_id: int, sent: torch.Tensor) -> List[int]:
        # batch_id is unused: enforcer state is keyed purely by the token sequence.
        return self.token_enforcer.get_allowed_tokens(sent.tolist())
91
+
92
+
93
def build_transformers_prefix_allowed_tokens_fn(tokenizer_data: Union[PreTrainedTokenizerBase, TokenEnforcerTokenizerData],
                                                character_level_parser: CharacterLevelParser) -> TransformersPrefixAllowedTokensFn:
    """Build the prefix allowed tokens function that transformers will use to filter the tokens generated by the model. The result
    can be passed to the prefix_allowed_tokens_fn parameter of the generate() method of transformers models or pipeline configurations."""
    # Accept either a raw tokenizer (precompute its data now) or precomputed data.
    if isinstance(tokenizer_data, PreTrainedTokenizerBase):
        tokenizer_data = build_token_enforcer_tokenizer_data(tokenizer_data)
    enforcer = TokenEnforcer(tokenizer_data, character_level_parser)
    return TransformersPrefixAllowedTokensFn(enforcer)
101
+
102
+
103
def generate_enforced(model: AutoModelForCausalLM,
                      tokenizer: Union[PreTrainedTokenizerBase, TokenEnforcerTokenizerData],
                      character_level_parser: CharacterLevelParser,
                      **kwargs: dict) -> Union[str, dict]:
    """Generate text from a model while enforcing a given format, generating enforcing diagnostic information.
    This can be used instead of calling model.generate().
    If return_dict_in_generate and output_scores parameters are True, diagnostic information will be returned in the result.
    If you don't need this, consider using prefix_allowed_tokens_fn + build_transformers_prefix_allowed_tokens_fn() instead"""

    transformers_filter_allowed_tokens = build_transformers_prefix_allowed_tokens_fn(tokenizer, character_level_parser)

    # Diagnostics are only collected for single-input, single-beam generation.
    is_multi_inputs = kwargs['input_ids'].shape[0] > 1
    is_multi_beams = kwargs.get('num_beams', 1) > 1
    support_diagnostics = not (is_multi_inputs or is_multi_beams)  # TODO: Support diagnostics in these cases as well.
    return_dict_in_generate = kwargs.get('return_dict_in_generate', False)
    output_scores = kwargs.get('output_scores', None)

    # We do some internals hacking in order to extract the data needed for diagnostics. If we weren't asked for them,
    # we are better off simply using prefix_allowed_tokens_fn parameter.
    should_run_in_advanced_mode = return_dict_in_generate and output_scores and support_diagnostics

    if should_run_in_advanced_mode:
        # Advanced mode: hook the warper pipeline so raw logits are recorded,
        # generate, then attach a per-step report to the output object.
        analyzer = FormatEnforcerAnalyzer(transformers_filter_allowed_tokens.token_enforcer)
        logits_saver = LogitsSaverManager(model, analyzer)
        logits_saver.replace_logits_warper(transformers_filter_allowed_tokens)
        generate_kwargs = kwargs

        try:
            output = model.generate(**generate_kwargs)
        finally:
            # Always restore the patched warper hook, even on failure.
            logits_saver.unreplace_logits_warper()

        # Build the diagnostics report for the (single) generated sequence.
        df_dict = analyzer.generate_report_dict(output['sequences'][0].tolist())
        output.enforced_scores = df_dict
    else:
        # Simple mode: plain generate() with token filtering only.
        output = model.generate(**kwargs, prefix_allowed_tokens_fn=transformers_filter_allowed_tokens)

    return output
141
+
142
+ __all__ = [
143
+ 'build_transformers_prefix_allowed_tokens_fn',
144
+ 'generate_enforced',
145
+ 'build_token_enforcer_tokenizer_data'
146
+ ]
deepseek/lib/python3.10/site-packages/lmformatenforcer/regexparser.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Dict, Hashable, Optional, Union, List
2
+ import interegular
3
+ from interegular.fsm import anything_else
4
+
5
+ from .characterlevelparser import CharacterLevelParser, CharacterLevelParserConfig
6
+
7
class RegexParser(CharacterLevelParser):
    """RegexParser is an example CharacterLevelParser that only allows strings that match a given regular expression."""

    # Sentinel states: not-yet-initialized, and the FSM dead state.
    UNINITIALIZED_STATE = -1
    INVALID_STATE = -2

    class _Context:
        # Per-pattern data shared by every derived parser state, so advancing
        # the parser never recompiles or copies the FSM.
        pattern: interegular.FSM
        anything_else_characters: str
        state_character_cache: Dict[int, str]

    context: _Context
    current_state: int

    def __init__(self, pattern: Union[str, _Context], config: Optional[CharacterLevelParserConfig] = None, current_state: int = UNINITIALIZED_STATE):
        super().__init__(config)
        if isinstance(pattern, str):
            # Fresh parser: compile the regex into an FSM once and share it.
            self.context = RegexParser._Context()
            self.context.pattern = interegular.parse_pattern(pattern).to_fsm()
            self.context.state_character_cache = {}
            self._update_alphabet(self.config.alphabet)
        else:
            # Derived parser: reuse the already-compiled shared context.
            self.context = pattern
        self.current_state: int = self.context.pattern.initial if current_state == RegexParser.UNINITIALIZED_STATE else current_state

    def add_character(self, new_character: str) -> 'RegexParser':
        # Once dead, stay dead.
        if self.current_state == RegexParser.INVALID_STATE:
            return self

        state = self.current_state
        fsm = self.context.pattern
        # Mostly taken from FSM.accept()
        symbol = new_character
        if anything_else in fsm.alphabet and not symbol in fsm.alphabet:
            symbol = anything_else
        transition = fsm.alphabet[symbol]

        try:
            # Prefer try-catch to checking if transition exists to avoid double lookup perf hit in valid case
            state = fsm.map[state][transition]  # type: ignore
            return RegexParser(self.context, self.config, state)
        except KeyError:
            # Missing transition = transition to dead state
            return RegexParser(self.context, self.config, RegexParser.INVALID_STATE)

    def can_end(self) -> bool:
        # The dead state is also allowed to "end" so generation can stop.
        return self.current_state in self.context.pattern.finals or self.current_state == RegexParser.INVALID_STATE

    def get_allowed_characters(self) -> str:
        if self.current_state not in self.context.pattern.map:
            return ''
        # Build (and memoize) the allowed-character string per FSM state.
        if self.current_state not in self.context.state_character_cache:
            allowed_characters = []
            state_map = self.context.pattern.map[self.current_state]
            for symbol_idx in state_map:
                symbols: List[str] = self.context.pattern.alphabet.by_transition[symbol_idx]
                for symbol in symbols:
                    if symbol == anything_else:
                        # Expand the FSM wildcard to the configured alphabet's
                        # characters that the pattern doesn't mention explicitly.
                        allowed_characters.append(self.context.anything_else_characters)
                    else:
                        allowed_characters.append(symbol)
            self.context.state_character_cache[self.current_state] = "".join(allowed_characters)
        return self.context.state_character_cache[self.current_state]

    def cache_key(self) -> Optional[Hashable]:
        # If we are in the same regex fsm state, the allowed next tokens are the same ones
        return self.current_state

    def _update_alphabet(self, new_alphabet: str):
        # Recompute which characters the wildcard transition should expand to:
        # everything in the new alphabet not explicitly in the pattern alphabet.
        if self.context:
            not_anything_else_characters = set([c for c in self.context.pattern.alphabet.keys() if c != anything_else])
            self.context.anything_else_characters = "".join([c for c in new_alphabet if c not in not_anything_else_characters])

    @CharacterLevelParser.config.setter
    def config(self, new_config: CharacterLevelParserConfig):
        CharacterLevelParser.config.fset(self, new_config)  # Original set
        self._update_alphabet(new_config.alphabet)
84
+
85
+
deepseek/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE.APACHE ADDED
@@ -0,0 +1,177 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ Apache License
3
+ Version 2.0, January 2004
4
+ http://www.apache.org/licenses/
5
+
6
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7
+
8
+ 1. Definitions.
9
+
10
+ "License" shall mean the terms and conditions for use, reproduction,
11
+ and distribution as defined by Sections 1 through 9 of this document.
12
+
13
+ "Licensor" shall mean the copyright owner or entity authorized by
14
+ the copyright owner that is granting the License.
15
+
16
+ "Legal Entity" shall mean the union of the acting entity and all
17
+ other entities that control, are controlled by, or are under common
18
+ control with that entity. For the purposes of this definition,
19
+ "control" means (i) the power, direct or indirect, to cause the
20
+ direction or management of such entity, whether by contract or
21
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
22
+ outstanding shares, or (iii) beneficial ownership of such entity.
23
+
24
+ "You" (or "Your") shall mean an individual or Legal Entity
25
+ exercising permissions granted by this License.
26
+
27
+ "Source" form shall mean the preferred form for making modifications,
28
+ including but not limited to software source code, documentation
29
+ source, and configuration files.
30
+
31
+ "Object" form shall mean any form resulting from mechanical
32
+ transformation or translation of a Source form, including but
33
+ not limited to compiled object code, generated documentation,
34
+ and conversions to other media types.
35
+
36
+ "Work" shall mean the work of authorship, whether in Source or
37
+ Object form, made available under the License, as indicated by a
38
+ copyright notice that is included in or attached to the work
39
+ (an example is provided in the Appendix below).
40
+
41
+ "Derivative Works" shall mean any work, whether in Source or Object
42
+ form, that is based on (or derived from) the Work and for which the
43
+ editorial revisions, annotations, elaborations, or other modifications
44
+ represent, as a whole, an original work of authorship. For the purposes
45
+ of this License, Derivative Works shall not include works that remain
46
+ separable from, or merely link (or bind by name) to the interfaces of,
47
+ the Work and Derivative Works thereof.
48
+
49
+ "Contribution" shall mean any work of authorship, including
50
+ the original version of the Work and any modifications or additions
51
+ to that Work or Derivative Works thereof, that is intentionally
52
+ submitted to Licensor for inclusion in the Work by the copyright owner
53
+ or by an individual or Legal Entity authorized to submit on behalf of
54
+ the copyright owner. For the purposes of this definition, "submitted"
55
+ means any form of electronic, verbal, or written communication sent
56
+ to the Licensor or its representatives, including but not limited to
57
+ communication on electronic mailing lists, source code control systems,
58
+ and issue tracking systems that are managed by, or on behalf of, the
59
+ Licensor for the purpose of discussing and improving the Work, but
60
+ excluding communication that is conspicuously marked or otherwise
61
+ designated in writing by the copyright owner as "Not a Contribution."
62
+
63
+ "Contributor" shall mean Licensor and any individual or Legal Entity
64
+ on behalf of whom a Contribution has been received by Licensor and
65
+ subsequently incorporated within the Work.
66
+
67
+ 2. Grant of Copyright License. Subject to the terms and conditions of
68
+ this License, each Contributor hereby grants to You a perpetual,
69
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70
+ copyright license to reproduce, prepare Derivative Works of,
71
+ publicly display, publicly perform, sublicense, and distribute the
72
+ Work and such Derivative Works in Source or Object form.
73
+
74
+ 3. Grant of Patent License. Subject to the terms and conditions of
75
+ this License, each Contributor hereby grants to You a perpetual,
76
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77
+ (except as stated in this section) patent license to make, have made,
78
+ use, offer to sell, sell, import, and otherwise transfer the Work,
79
+ where such license applies only to those patent claims licensable
80
+ by such Contributor that are necessarily infringed by their
81
+ Contribution(s) alone or by combination of their Contribution(s)
82
+ with the Work to which such Contribution(s) was submitted. If You
83
+ institute patent litigation against any entity (including a
84
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
85
+ or a Contribution incorporated within the Work constitutes direct
86
+ or contributory patent infringement, then any patent licenses
87
+ granted to You under this License for that Work shall terminate
88
+ as of the date such litigation is filed.
89
+
90
+ 4. Redistribution. You may reproduce and distribute copies of the
91
+ Work or Derivative Works thereof in any medium, with or without
92
+ modifications, and in Source or Object form, provided that You
93
+ meet the following conditions:
94
+
95
+ (a) You must give any other recipients of the Work or
96
+ Derivative Works a copy of this License; and
97
+
98
+ (b) You must cause any modified files to carry prominent notices
99
+ stating that You changed the files; and
100
+
101
+ (c) You must retain, in the Source form of any Derivative Works
102
+ that You distribute, all copyright, patent, trademark, and
103
+ attribution notices from the Source form of the Work,
104
+ excluding those notices that do not pertain to any part of
105
+ the Derivative Works; and
106
+
107
+ (d) If the Work includes a "NOTICE" text file as part of its
108
+ distribution, then any Derivative Works that You distribute must
109
+ include a readable copy of the attribution notices contained
110
+ within such NOTICE file, excluding those notices that do not
111
+ pertain to any part of the Derivative Works, in at least one
112
+ of the following places: within a NOTICE text file distributed
113
+ as part of the Derivative Works; within the Source form or
114
+ documentation, if provided along with the Derivative Works; or,
115
+ within a display generated by the Derivative Works, if and
116
+ wherever such third-party notices normally appear. The contents
117
+ of the NOTICE file are for informational purposes only and
118
+ do not modify the License. You may add Your own attribution
119
+ notices within Derivative Works that You distribute, alongside
120
+ or as an addendum to the NOTICE text from the Work, provided
121
+ that such additional attribution notices cannot be construed
122
+ as modifying the License.
123
+
124
+ You may add Your own copyright statement to Your modifications and
125
+ may provide additional or different license terms and conditions
126
+ for use, reproduction, or distribution of Your modifications, or
127
+ for any such Derivative Works as a whole, provided Your use,
128
+ reproduction, and distribution of the Work otherwise complies with
129
+ the conditions stated in this License.
130
+
131
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
132
+ any Contribution intentionally submitted for inclusion in the Work
133
+ by You to the Licensor shall be under the terms and conditions of
134
+ this License, without any additional terms or conditions.
135
+ Notwithstanding the above, nothing herein shall supersede or modify
136
+ the terms of any separate license agreement you may have executed
137
+ with Licensor regarding such Contributions.
138
+
139
+ 6. Trademarks. This License does not grant permission to use the trade
140
+ names, trademarks, service marks, or product names of the Licensor,
141
+ except as required for reasonable and customary use in describing the
142
+ origin of the Work and reproducing the content of the NOTICE file.
143
+
144
+ 7. Disclaimer of Warranty. Unless required by applicable law or
145
+ agreed to in writing, Licensor provides the Work (and each
146
+ Contributor provides its Contributions) on an "AS IS" BASIS,
147
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148
+ implied, including, without limitation, any warranties or conditions
149
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150
+ PARTICULAR PURPOSE. You are solely responsible for determining the
151
+ appropriateness of using or redistributing the Work and assume any
152
+ risks associated with Your exercise of permissions under this License.
153
+
154
+ 8. Limitation of Liability. In no event and under no legal theory,
155
+ whether in tort (including negligence), contract, or otherwise,
156
+ unless required by applicable law (such as deliberate and grossly
157
+ negligent acts) or agreed to in writing, shall any Contributor be
158
+ liable to You for damages, including any direct, indirect, special,
159
+ incidental, or consequential damages of any character arising as a
160
+ result of this License or out of the use or inability to use the
161
+ Work (including but not limited to damages for loss of goodwill,
162
+ work stoppage, computer failure or malfunction, or any and all
163
+ other commercial damages or losses), even if such Contributor
164
+ has been advised of the possibility of such damages.
165
+
166
+ 9. Accepting Warranty or Additional Liability. While redistributing
167
+ the Work or Derivative Works thereof, You may choose to offer,
168
+ and charge a fee for, acceptance of support, warranty, indemnity,
169
+ or other liability obligations and/or rights consistent with this
170
+ License. However, in accepting such obligations, You may act only
171
+ on Your own behalf and on Your sole responsibility, not on behalf
172
+ of any other Contributor, and only if You agree to indemnify,
173
+ defend, and hold each Contributor harmless for any liability
174
+ incurred by, or claims asserted against, such Contributor by reason
175
+ of your accepting any such warranty or additional liability.
176
+
177
+ END OF TERMS AND CONDITIONS
deepseek/lib/python3.10/site-packages/packaging-24.2.dist-info/METADATA ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.3
2
+ Name: packaging
3
+ Version: 24.2
4
+ Summary: Core utilities for Python packages
5
+ Author-email: Donald Stufft <donald@stufft.io>
6
+ Requires-Python: >=3.8
7
+ Description-Content-Type: text/x-rst
8
+ Classifier: Development Status :: 5 - Production/Stable
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: License :: OSI Approved :: Apache Software License
11
+ Classifier: License :: OSI Approved :: BSD License
12
+ Classifier: Programming Language :: Python
13
+ Classifier: Programming Language :: Python :: 3
14
+ Classifier: Programming Language :: Python :: 3 :: Only
15
+ Classifier: Programming Language :: Python :: 3.8
16
+ Classifier: Programming Language :: Python :: 3.9
17
+ Classifier: Programming Language :: Python :: 3.10
18
+ Classifier: Programming Language :: Python :: 3.11
19
+ Classifier: Programming Language :: Python :: 3.12
20
+ Classifier: Programming Language :: Python :: 3.13
21
+ Classifier: Programming Language :: Python :: Implementation :: CPython
22
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
23
+ Classifier: Typing :: Typed
24
+ Project-URL: Documentation, https://packaging.pypa.io/
25
+ Project-URL: Source, https://github.com/pypa/packaging
26
+
27
+ packaging
28
+ =========
29
+
30
+ .. start-intro
31
+
32
+ Reusable core utilities for various Python Packaging
33
+ `interoperability specifications <https://packaging.python.org/specifications/>`_.
34
+
35
+ This library provides utilities that implement the interoperability
36
+ specifications which have clearly one correct behaviour (eg: :pep:`440`)
37
+ or benefit greatly from having a single shared implementation (eg: :pep:`425`).
38
+
39
+ .. end-intro
40
+
41
+ The ``packaging`` project includes the following: version handling, specifiers,
42
+ markers, requirements, tags, utilities.
43
+
44
+ Documentation
45
+ -------------
46
+
47
+ The `documentation`_ provides information and the API for the following:
48
+
49
+ - Version Handling
50
+ - Specifiers
51
+ - Markers
52
+ - Requirements
53
+ - Tags
54
+ - Utilities
55
+
56
+ Installation
57
+ ------------
58
+
59
+ Use ``pip`` to install these utilities::
60
+
61
+ pip install packaging
62
+
63
+ The ``packaging`` library uses calendar-based versioning (``YY.N``).
64
+
65
+ Discussion
66
+ ----------
67
+
68
+ If you run into bugs, you can file them in our `issue tracker`_.
69
+
70
+ You can also join ``#pypa`` on Freenode to ask questions or get involved.
71
+
72
+
73
+ .. _`documentation`: https://packaging.pypa.io/
74
+ .. _`issue tracker`: https://github.com/pypa/packaging/issues
75
+
76
+
77
+ Code of Conduct
78
+ ---------------
79
+
80
+ Everyone interacting in the packaging project's codebases, issue trackers, chat
81
+ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
82
+
83
+ .. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
84
+
85
+ Contributing
86
+ ------------
87
+
88
+ The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as
89
+ well as how to report a potential security issue. The documentation for this
90
+ project also covers information about `project development`_ and `security`_.
91
+
92
+ .. _`project development`: https://packaging.pypa.io/en/latest/development/
93
+ .. _`security`: https://packaging.pypa.io/en/latest/security/
94
+
95
+ Project History
96
+ ---------------
97
+
98
+ Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for
99
+ recent changes and project history.
100
+
101
+ .. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/
102
+
deepseek/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1d74d07fb876852c3181c7a6b54401a804941c909e42efc31fff34dc31da6c5b
3
+ size 100332
deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (311 Bytes). View file
 
deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/ast_parser.cpython-310.pyc ADDED
Binary file (3.13 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/mathematica.cpython-310.pyc ADDED
Binary file (29.1 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/maxima.cpython-310.pyc ADDED
Binary file (2.47 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/sym_expr.cpython-310.pyc ADDED
Binary file (8.65 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/sympy/parsing/__pycache__/sympy_parser.cpython-310.pyc ADDED
Binary file (30.4 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/LICENSE.txt ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ The MIT License (MIT)
2
+
3
+ Copyright 2016, latex2sympy
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/LaTeX.g4 ADDED
@@ -0,0 +1,312 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /*
2
+ ANTLR4 LaTeX Math Grammar
3
+
4
+ Ported from latex2sympy by @augustt198 https://github.com/augustt198/latex2sympy See license in
5
+ LICENSE.txt
6
+ */
7
+
8
+ /*
9
+ After changing this file, it is necessary to run `python setup.py antlr` in the root directory of
10
+ the repository. This will regenerate the code in `sympy/parsing/latex/_antlr/*.py`.
11
+ */
12
+
13
+ grammar LaTeX;
14
+
15
+ options {
16
+ language = Python3;
17
+ }
18
+
19
+ WS: [ \t\r\n]+ -> skip;
20
+ THINSPACE: ('\\,' | '\\thinspace') -> skip;
21
+ MEDSPACE: ('\\:' | '\\medspace') -> skip;
22
+ THICKSPACE: ('\\;' | '\\thickspace') -> skip;
23
+ QUAD: '\\quad' -> skip;
24
+ QQUAD: '\\qquad' -> skip;
25
+ NEGTHINSPACE: ('\\!' | '\\negthinspace') -> skip;
26
+ NEGMEDSPACE: '\\negmedspace' -> skip;
27
+ NEGTHICKSPACE: '\\negthickspace' -> skip;
28
+ CMD_LEFT: '\\left' -> skip;
29
+ CMD_RIGHT: '\\right' -> skip;
30
+
31
+ IGNORE:
32
+ (
33
+ '\\vrule'
34
+ | '\\vcenter'
35
+ | '\\vbox'
36
+ | '\\vskip'
37
+ | '\\vspace'
38
+ | '\\hfil'
39
+ | '\\*'
40
+ | '\\-'
41
+ | '\\.'
42
+ | '\\/'
43
+ | '\\"'
44
+ | '\\('
45
+ | '\\='
46
+ ) -> skip;
47
+
48
+ ADD: '+';
49
+ SUB: '-';
50
+ MUL: '*';
51
+ DIV: '/';
52
+
53
+ L_PAREN: '(';
54
+ R_PAREN: ')';
55
+ L_BRACE: '{';
56
+ R_BRACE: '}';
57
+ L_BRACE_LITERAL: '\\{';
58
+ R_BRACE_LITERAL: '\\}';
59
+ L_BRACKET: '[';
60
+ R_BRACKET: ']';
61
+
62
+ BAR: '|';
63
+
64
+ R_BAR: '\\right|';
65
+ L_BAR: '\\left|';
66
+
67
+ L_ANGLE: '\\langle';
68
+ R_ANGLE: '\\rangle';
69
+ FUNC_LIM: '\\lim';
70
+ LIM_APPROACH_SYM:
71
+ '\\to'
72
+ | '\\rightarrow'
73
+ | '\\Rightarrow'
74
+ | '\\longrightarrow'
75
+ | '\\Longrightarrow';
76
+ FUNC_INT:
77
+ '\\int'
78
+ | '\\int\\limits';
79
+ FUNC_SUM: '\\sum';
80
+ FUNC_PROD: '\\prod';
81
+
82
+ FUNC_EXP: '\\exp';
83
+ FUNC_LOG: '\\log';
84
+ FUNC_LG: '\\lg';
85
+ FUNC_LN: '\\ln';
86
+ FUNC_SIN: '\\sin';
87
+ FUNC_COS: '\\cos';
88
+ FUNC_TAN: '\\tan';
89
+ FUNC_CSC: '\\csc';
90
+ FUNC_SEC: '\\sec';
91
+ FUNC_COT: '\\cot';
92
+
93
+ FUNC_ARCSIN: '\\arcsin';
94
+ FUNC_ARCCOS: '\\arccos';
95
+ FUNC_ARCTAN: '\\arctan';
96
+ FUNC_ARCCSC: '\\arccsc';
97
+ FUNC_ARCSEC: '\\arcsec';
98
+ FUNC_ARCCOT: '\\arccot';
99
+
100
+ FUNC_SINH: '\\sinh';
101
+ FUNC_COSH: '\\cosh';
102
+ FUNC_TANH: '\\tanh';
103
+ FUNC_ARSINH: '\\arsinh';
104
+ FUNC_ARCOSH: '\\arcosh';
105
+ FUNC_ARTANH: '\\artanh';
106
+
107
+ L_FLOOR: '\\lfloor';
108
+ R_FLOOR: '\\rfloor';
109
+ L_CEIL: '\\lceil';
110
+ R_CEIL: '\\rceil';
111
+
112
+ FUNC_SQRT: '\\sqrt';
113
+ FUNC_OVERLINE: '\\overline';
114
+
115
+ CMD_TIMES: '\\times';
116
+ CMD_CDOT: '\\cdot';
117
+ CMD_DIV: '\\div';
118
+ CMD_FRAC:
119
+ '\\frac'
120
+ | '\\dfrac'
121
+ | '\\tfrac';
122
+ CMD_BINOM: '\\binom';
123
+ CMD_DBINOM: '\\dbinom';
124
+ CMD_TBINOM: '\\tbinom';
125
+
126
+ CMD_MATHIT: '\\mathit';
127
+
128
+ UNDERSCORE: '_';
129
+ CARET: '^';
130
+ COLON: ':';
131
+
132
+ fragment WS_CHAR: [ \t\r\n];
133
+ DIFFERENTIAL: 'd' WS_CHAR*? ([a-zA-Z] | '\\' [a-zA-Z]+);
134
+
135
+ LETTER: [a-zA-Z];
136
+ DIGIT: [0-9];
137
+
138
+ EQUAL: (('&' WS_CHAR*?)? '=') | ('=' (WS_CHAR*? '&')?);
139
+ NEQ: '\\neq';
140
+
141
+ LT: '<';
142
+ LTE: ('\\leq' | '\\le' | LTE_Q | LTE_S);
143
+ LTE_Q: '\\leqq';
144
+ LTE_S: '\\leqslant';
145
+
146
+ GT: '>';
147
+ GTE: ('\\geq' | '\\ge' | GTE_Q | GTE_S);
148
+ GTE_Q: '\\geqq';
149
+ GTE_S: '\\geqslant';
150
+
151
+ BANG: '!';
152
+
153
+ SINGLE_QUOTES: '\''+;
154
+
155
+ SYMBOL: '\\' [a-zA-Z]+;
156
+
157
+ math: relation;
158
+
159
+ relation:
160
+ relation (EQUAL | LT | LTE | GT | GTE | NEQ) relation
161
+ | expr;
162
+
163
+ equality: expr EQUAL expr;
164
+
165
+ expr: additive;
166
+
167
+ additive: additive (ADD | SUB) additive | mp;
168
+
169
+ // mult part
170
+ mp:
171
+ mp (MUL | CMD_TIMES | CMD_CDOT | DIV | CMD_DIV | COLON) mp
172
+ | unary;
173
+
174
+ mp_nofunc:
175
+ mp_nofunc (
176
+ MUL
177
+ | CMD_TIMES
178
+ | CMD_CDOT
179
+ | DIV
180
+ | CMD_DIV
181
+ | COLON
182
+ ) mp_nofunc
183
+ | unary_nofunc;
184
+
185
+ unary: (ADD | SUB) unary | postfix+;
186
+
187
+ unary_nofunc:
188
+ (ADD | SUB) unary_nofunc
189
+ | postfix postfix_nofunc*;
190
+
191
+ postfix: exp postfix_op*;
192
+ postfix_nofunc: exp_nofunc postfix_op*;
193
+ postfix_op: BANG | eval_at;
194
+
195
+ eval_at:
196
+ BAR (eval_at_sup | eval_at_sub | eval_at_sup eval_at_sub);
197
+
198
+ eval_at_sub: UNDERSCORE L_BRACE (expr | equality) R_BRACE;
199
+
200
+ eval_at_sup: CARET L_BRACE (expr | equality) R_BRACE;
201
+
202
+ exp: exp CARET (atom | L_BRACE expr R_BRACE) subexpr? | comp;
203
+
204
+ exp_nofunc:
205
+ exp_nofunc CARET (atom | L_BRACE expr R_BRACE) subexpr?
206
+ | comp_nofunc;
207
+
208
+ comp:
209
+ group
210
+ | abs_group
211
+ | func
212
+ | atom
213
+ | floor
214
+ | ceil;
215
+
216
+ comp_nofunc:
217
+ group
218
+ | abs_group
219
+ | atom
220
+ | floor
221
+ | ceil;
222
+
223
+ group:
224
+ L_PAREN expr R_PAREN
225
+ | L_BRACKET expr R_BRACKET
226
+ | L_BRACE expr R_BRACE
227
+ | L_BRACE_LITERAL expr R_BRACE_LITERAL;
228
+
229
+ abs_group: BAR expr BAR;
230
+
231
+ number: DIGIT+ (',' DIGIT DIGIT DIGIT)* ('.' DIGIT+)?;
232
+
233
+ atom: (LETTER | SYMBOL) (subexpr? SINGLE_QUOTES? | SINGLE_QUOTES? subexpr?)
234
+ | number
235
+ | DIFFERENTIAL
236
+ | mathit
237
+ | frac
238
+ | binom
239
+ | bra
240
+ | ket;
241
+
242
+ bra: L_ANGLE expr (R_BAR | BAR);
243
+ ket: (L_BAR | BAR) expr R_ANGLE;
244
+
245
+ mathit: CMD_MATHIT L_BRACE mathit_text R_BRACE;
246
+ mathit_text: LETTER*;
247
+
248
+ frac: CMD_FRAC (upperd = DIGIT | L_BRACE upper = expr R_BRACE)
249
+ (lowerd = DIGIT | L_BRACE lower = expr R_BRACE);
250
+
251
+ binom:
252
+ (CMD_BINOM | CMD_DBINOM | CMD_TBINOM) L_BRACE n = expr R_BRACE L_BRACE k = expr R_BRACE;
253
+
254
+ floor: L_FLOOR val = expr R_FLOOR;
255
+ ceil: L_CEIL val = expr R_CEIL;
256
+
257
+ func_normal:
258
+ FUNC_EXP
259
+ | FUNC_LOG
260
+ | FUNC_LG
261
+ | FUNC_LN
262
+ | FUNC_SIN
263
+ | FUNC_COS
264
+ | FUNC_TAN
265
+ | FUNC_CSC
266
+ | FUNC_SEC
267
+ | FUNC_COT
268
+ | FUNC_ARCSIN
269
+ | FUNC_ARCCOS
270
+ | FUNC_ARCTAN
271
+ | FUNC_ARCCSC
272
+ | FUNC_ARCSEC
273
+ | FUNC_ARCCOT
274
+ | FUNC_SINH
275
+ | FUNC_COSH
276
+ | FUNC_TANH
277
+ | FUNC_ARSINH
278
+ | FUNC_ARCOSH
279
+ | FUNC_ARTANH;
280
+
281
+ func:
282
+ func_normal (subexpr? supexpr? | supexpr? subexpr?) (
283
+ L_PAREN func_arg R_PAREN
284
+ | func_arg_noparens
285
+ )
286
+ | (LETTER | SYMBOL) (subexpr? SINGLE_QUOTES? | SINGLE_QUOTES? subexpr?) // e.g. f(x), f_1'(x)
287
+ L_PAREN args R_PAREN
288
+ | FUNC_INT (subexpr supexpr | supexpr subexpr)? (
289
+ additive? DIFFERENTIAL
290
+ | frac
291
+ | additive
292
+ )
293
+ | FUNC_SQRT (L_BRACKET root = expr R_BRACKET)? L_BRACE base = expr R_BRACE
294
+ | FUNC_OVERLINE L_BRACE base = expr R_BRACE
295
+ | (FUNC_SUM | FUNC_PROD) (subeq supexpr | supexpr subeq) mp
296
+ | FUNC_LIM limit_sub mp;
297
+
298
+ args: (expr ',' args) | expr;
299
+
300
+ limit_sub:
301
+ UNDERSCORE L_BRACE (LETTER | SYMBOL) LIM_APPROACH_SYM expr (
302
+ CARET ((L_BRACE (ADD | SUB) R_BRACE) | ADD | SUB)
303
+ )? R_BRACE;
304
+
305
+ func_arg: expr | (expr ',' func_arg);
306
+ func_arg_noparens: mp_nofunc;
307
+
308
+ subexpr: UNDERSCORE (atom | L_BRACE expr R_BRACE);
309
+ supexpr: CARET (atom | L_BRACE expr R_BRACE);
310
+
311
+ subeq: UNDERSCORE L_BRACE equality R_BRACE;
312
+ supeq: UNDERSCORE L_BRACE equality R_BRACE;
deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__init__.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from sympy.external import import_module
2
+ from sympy.utilities.decorator import doctest_depends_on
3
+
4
+ from sympy.parsing.latex.lark import LarkLaTeXParser, TransformToSymPyExpr, parse_latex_lark # noqa
5
+
6
+ from .errors import LaTeXParsingError # noqa
7
+
8
+
9
+ __doctest_requires__ = {('parse_latex',): ['antlr4', 'lark']}
10
+
11
+
12
+ @doctest_depends_on(modules=('antlr4', 'lark'))
13
+ def parse_latex(s, strict=False, backend="antlr"):
14
+ r"""Converts the input LaTeX string ``s`` to a SymPy ``Expr``.
15
+
16
+ Parameters
17
+ ==========
18
+
19
+ s : str
20
+ The LaTeX string to parse. In Python source containing LaTeX,
21
+ *raw strings* (denoted with ``r"``, like this one) are preferred,
22
+ as LaTeX makes liberal use of the ``\`` character, which would
23
+ trigger escaping in normal Python strings.
24
+ backend : str, optional
25
+ Currently, there are two backends supported: ANTLR, and Lark.
26
+ The default setting is to use the ANTLR backend, which can be
27
+ changed to Lark if preferred.
28
+
29
+ Use ``backend="antlr"`` for the ANTLR-based parser, and
30
+ ``backend="lark"`` for the Lark-based parser.
31
+
32
+ The ``backend`` option is case-sensitive, and must be in
33
+ all lowercase.
34
+ strict : bool, optional
35
+ This option is only available with the ANTLR backend.
36
+
37
+ If True, raise an exception if the string cannot be parsed as
38
+ valid LaTeX. If False, try to recover gracefully from common
39
+ mistakes.
40
+
41
+ Examples
42
+ ========
43
+
44
+ >>> from sympy.parsing.latex import parse_latex
45
+ >>> expr = parse_latex(r"\frac {1 + \sqrt {\a}} {\b}")
46
+ >>> expr
47
+ (sqrt(a) + 1)/b
48
+ >>> expr.evalf(4, subs=dict(a=5, b=2))
49
+ 1.618
50
+ >>> func = parse_latex(r"\int_1^\alpha \dfrac{\mathrm{d}t}{t}", backend="lark")
51
+ >>> func.evalf(subs={"alpha": 2})
52
+ 0.693147180559945
53
+ """
54
+
55
+ if backend == "antlr":
56
+ _latex = import_module(
57
+ 'sympy.parsing.latex._parse_latex_antlr',
58
+ import_kwargs={'fromlist': ['X']})
59
+
60
+ if _latex is not None:
61
+ return _latex.parse_latex(s, strict)
62
+ elif backend == "lark":
63
+ return parse_latex_lark(s)
64
+ else:
65
+ raise NotImplementedError(f"Using the '{backend}' backend in the LaTeX" \
66
+ " parser is not supported.")
deepseekvl2/lib/python3.10/site-packages/sympy/parsing/latex/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (2.39 kB). View file