Upload folder using huggingface_hub
- NOTICE +779 -0
- README.md +1315 -3
- chat_template.json +3 -0
- config.json +51 -0
- configuration_dots.py +77 -0
- dots.ocr LICENSE AGREEMENT +109 -0
- generation_config.json +7 -0
- merges.txt +0 -0
- model-00001-of-00002.safetensors +3 -0
- model-00002-of-00002.safetensors +3 -0
- model.safetensors.index.json +650 -0
- modeling_dots_ocr.py +131 -0
- modeling_dots_ocr_vllm.py +451 -0
- modeling_dots_vision.py +520 -0
- preprocessor_config.json +22 -0
- special_tokens_map.json +25 -0
- tokenizer.json +0 -0
- tokenizer_config.json +391 -0
- vocab.json +0 -0
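
The commit message above is the default produced by the `huggingface_hub` folder-upload API. As a hedged illustration (not taken from this repository), a commit like this one can be created with `HfApi.upload_folder`; the local path and repo id below are placeholders:

```python
from huggingface_hub import HfApi

# Sketch only: folder_path and repo_id are placeholders, not values from this commit.
api = HfApi()  # authenticates with the token stored by `huggingface-cli login`
api.upload_folder(
    folder_path="./dots.ocr",        # local directory containing the files listed above
    repo_id="your-org/your-model",   # target model repository on the Hub
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```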
NOTICE (added)
@@ -0,0 +1,779 @@
| 1 |
+
==================================================================
|
| 2 |
+
=============== Copyright Notice and License Texts ===============
|
| 3 |
+
==================================================================
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
------------- LICENSE FOR gradio CODE --------------
|
| 7 |
+
|
| 8 |
+
Copyright notice:No copyright info provided
|
| 9 |
+
|
| 10 |
+
License:apache2.0
|
| 11 |
+
|

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.

"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:

(a) You must give any other recipients of the Work or Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.

You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.


------------- LICENSE FOR gradio_image_annotation CODE --------------

Copyright notice: Copyright (c) 2024 Edgar Gracia

License: MIT

MIT License

Copyright (c) 2024 Edgar Gracia

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


------------- LICENSE FOR PyMuPDF CODE --------------

Copyright notice: Copyright (C) 2007 Free Software Foundation, Inc.

License: AGPL-3.0

GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007

Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.

Preamble

The GNU Affero General Public License is a free, copyleft license for software and other kinds of works, specifically designed to ensure cooperation with the community in the case of network server software.

The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, our General Public Licenses are intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users.

When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things.

Developers that use our General Public Licenses protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License which gives you legal permission to copy, distribute and/or modify the software.

A secondary benefit of defending all users' freedom is that improvements made in alternate versions of the program, if they receive widespread use, become available for other developers to incorporate. Many developers of free software are heartened and encouraged by the resulting cooperation. However, in the case of software used on network servers, this result may fail to come about. The GNU General Public License permits making a modified version and letting the public access it on a server without ever releasing its source code to the public.

The GNU Affero General Public License is designed specifically to ensure that, in such cases, the modified source code becomes available to the community. It requires the operator of a network server to provide the source code of the modified version running there to the users of that server. Therefore, public use of a modified version, on a publicly accessible server, gives the public access to the source code of the modified version.

An older license, called the Affero General Public License and published by Affero, was designed to accomplish similar goals. This is a different license, not a version of the Affero GPL, but Affero has released a new version of the Affero GPL which permits relicensing under this license.

The precise terms and conditions for copying, distribution and modification follow.

TERMS AND CONDITIONS

0. Definitions.

"This License" refers to version 3 of the GNU Affero General Public License.

"Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.

"The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations.

To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work.

A "covered work" means either the unmodified Program or a work based on the Program.

To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well.

To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying.

An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion.

1. Source Code.

The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work.

A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language.

The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it.

The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work.

The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source.

The Corresponding Source for a work in source code form is that same work.

2. Basic Permissions.

All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law.

You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you.

Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary.

3. Protecting Users' Legal Rights From Anti-Circumvention Law.

No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures.

When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures.

4. Conveying Verbatim Copies.

You may convey verbatim copies of the Program's source code as you receive it in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program.

You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee.

5. Conveying Modified Source Versions.

You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions:

a) The work must carry prominent notices stating that you modified it, and giving a relevant date.

b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices".

c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it.

d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so.

A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate.

6. Conveying Non-Source Forms.

You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways:

a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange.

b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge.

c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b.

d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements.

e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d.

A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work.

A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product.

"Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made.

If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM).

The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network.

Corresponding Source conveyed, and Installation Information provided, in accordance with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying.

7. Additional Terms.

"Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions.

When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission.

Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms:

a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or

b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or

c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or

d) Limiting the use for publicity purposes of names of licensors or authors of the material; or

e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or

f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors.

All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying.

If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms.

Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way.

8. Termination.

You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11).

However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation.

Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice.

Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10.

9. Acceptance Not Required for Having Copies.

You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so.

10. Automatic Licensing of Downstream Recipients.

Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License.

An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts.

You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it.

11. Patents.

A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version".

A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License.

Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version.

In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party.

If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid.

If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it.

A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007.

Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law.

12. No Surrender of Others' Freedom.

If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program.

13. Remote Network Interaction; Use with the GNU General Public License.

Notwithstanding any other provision of this License, if you modify the Program, your modified version must prominently offer all users interacting with it remotely through a computer network (if your version supports such interaction) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge, through some standard or customary means of facilitating copying of software. This Corresponding Source shall include the Corresponding Source for any work covered by version 3 of the GNU General Public License that is incorporated pursuant to the following paragraph.

Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the work with which it is combined will remain governed by version 3 of the GNU General Public License.

14. Revised Versions of this License.

The Free Software Foundation may publish revised and/or new versions of the GNU Affero General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.

Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation.

If the Program specifies that a proxy can decide which future versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program.

Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version.

15. Disclaimer of Warranty.

THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

16. Limitation of Liability.

IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

17. Interpretation of Sections 15 and 16.

If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee.

END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.

<one line to give the program's name and a brief idea of what it does.> Copyright (C) <year> <name of author>

This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.

This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If your software can interact with users remotely through a computer network, you should also make sure that it provides a way for users to get its source. For example, if your program is a web application, its interface could display a "Source" link that leads users to an archive of the code. There are many ways you could offer source, and different solutions will be better for different programs; see section 13 for the specific requirements.

You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU AGPL, see <http://www.gnu.org/licenses/>.
| 350 |
+
------------- LICENSE FOR openai CODE --------------
|
| 351 |
+
|
| 352 |
+
Copyright notice:Copyright 2025 OpenAI
|
| 353 |
+
|
| 354 |
+
License:apache2.0
|
| 355 |
+
|
| 356 |
+
Please see above.
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
------------- LICENSE FOR qwen_vl_utils CODE --------------
|
| 361 |
+
|
| 362 |
+
Copyright notice:No copyright info provided
|
| 363 |
+
|
| 364 |
+
License:apache2.0
|
| 365 |
+
|
| 366 |
+
Please see above.
|
| 367 |
+
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
------------- LICENSE FOR transformers CODE --------------
|
| 371 |
+
|
| 372 |
+
Copyright notice:Copyright 2018- The Hugging Face team. All rights reserved.
|
| 373 |
+
|
| 374 |
+
License:apache2.0
|
| 375 |
+
|
| 376 |
+
Please see above.
|
| 377 |
+
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
------------- LICENSE FOR huggingface_hub CODE --------------
|
| 381 |
+
|
| 382 |
+
Copyright notice:No copyright info provided
|
| 383 |
+
|
| 384 |
+
License:apache2.0
|
| 385 |
+
|
| 386 |
+
Please see above.
|
| 387 |
+
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
------------- LICENSE FOR flash-attn CODE --------------

Copyright notice: Copyright (c) 2022, the respective contributors, as shown by the AUTHORS file. All rights reserved.

License: BSD-3-Clause

BSD 3-Clause License

Copyright (c) 2022, the respective contributors, as shown by the AUTHORS file. All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

------------- LICENSE FOR accelerate CODE --------------

Copyright notice: No copyright info provided

License: Apache-2.0

Please see above.

------------- LICENSE FOR MonkeyOCR CODE --------------

Copyright notice: No copyright info provided

License: Apache-2.0

Please see above.

------------- LICENSE FOR OmniDocBench CODE --------------

Copyright notice: No copyright info provided

License: Apache-2.0

Please see above.

------------- LICENSE FOR Qwen2.5-VL CODE --------------

Copyright notice: No copyright info provided

License: Apache-2.0

Please see above.

------------- LICENSE FOR aimv2 CODE --------------

Copyright notice: Copyright (C) 2024 Apple Inc. All Rights Reserved.

License:

IMPORTANT: This Apple software is supplied to you by Apple Inc. ("Apple") in consideration of your agreement to the following terms, and your use, installation, modification or redistribution of this Apple software constitutes acceptance of these terms. If you do not agree with these terms, please do not use, install, modify or redistribute this Apple software.

In consideration of your agreement to abide by the following terms, and subject to these terms, Apple grants you a personal, non-exclusive license, under Apple's copyrights in this original Apple software (the "Apple Software"), to use, reproduce, modify and redistribute the Apple Software, with or without modifications, in source and/or binary forms; provided that if you redistribute the Apple Software in its entirety and without modifications, you must retain this notice and the following text and disclaimers in all such redistributions of the Apple Software. Neither the name, trademarks, service marks or logos of Apple Inc. may be used to endorse or promote products derived from the Apple Software without specific prior written permission from Apple. Except as expressly stated in this notice, no other rights or licenses, express or implied, are granted by Apple herein, including but not limited to any patent rights that may be infringed by your derivative works or by other works in which the Apple Software may be incorporated.

The Apple Software is provided by Apple on an "AS IS" basis. APPLE MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS. IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

SOFTWARE DISTRIBUTED WITH AUTOREGRESSIVE IMAGE MODELS:

The Autoregressive Image Models software includes a number of subcomponents with separate copyright notices and license terms - please see the file ACKNOWLEDGEMENTS.

Acknowledgements:

Portions of the Autoregressive Image Models project may utilize the following copyrighted material, the use of which is hereby acknowledged.

------------- LICENSE FOR Hugging Face CODE --------------

Copyright notice: Copyright 2019 Ross Wightman

License: Apache-2.0

Please see above.

------------- LICENSE FOR vLLM CODE --------------

Copyright notice: No copyright info provided

License: Apache-2.0

Please see above.

------------- LICENSE FOR Doclaynet --------------

Copyright notice: No copyright info provided

License: Community Data License Agreement

Community Data License Agreement – Permissive – Version 1.0

This is the Community Data License Agreement – Permissive, Version 1.0 ("Agreement"). Data is provided to You under this Agreement by each of the Data Providers. Your exercise of any of the rights and permissions granted below constitutes your acceptance and agreement to be bound by the terms and conditions of this Agreement.

The benefits that each Data Provider receives from making Data available and that You receive from Data or otherwise under these terms and conditions shall be deemed sufficient consideration for the formation of this Agreement. Accordingly, Data Provider(s) and You (the "Parties") agree as follows:

Section 1. Definitions

1.1 "Add" means to supplement Data with Your own or someone else's Data, resulting in Your "Additions." Additions do not include Results.

1.2 "Computational Use" means Your analysis (through the use of computational devices or otherwise) or other interpretation of Data. By way of example and not limitation, "Computational Use" includes the application of any computational analytical technique, the purpose of which is the analysis of any Data in digital form to generate information about Data such as patterns, trends, correlations, inferences, insights and attributes.

1.3 "Data" means the information (including copyrightable information, such as images or text), collectively or individually, whether created or gathered by a Data Provider or an Entity acting on its behalf, to which rights are granted under this Agreement.

1.4 "Data Provider" means any Entity (including any employee or contractor of such Entity authorized to Publish Data on behalf of such Entity) that Publishes Data under this Agreement prior to Your Receiving it.

1.5 "Enhanced Data" means the subset of Data that You Publish and that is composed of (a) Your Additions and/or (b) Modifications to Data You have received under this Agreement.

1.6 "Entity" means any natural person or organization that exists under the laws of the jurisdiction in which it is organized, together with all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (a) the power, directly or indirectly, to cause the direction or management of such entity, whether by contract or otherwise, (b) the ownership of more than fifty percent (50%) of the outstanding shares or securities, (c) the beneficial ownership of such entity or, (d) the ability to appoint, whether by agreement or right, the majority of directors of an Entity.

1.7 "Modify" means to delete, erase, correct or re-arrange Data, resulting in "Modifications." Modifications do not include Results.

1.8 "Publish" means to make all or a subset of Data (including Your Enhanced Data) available in any manner which enables its use, including by providing a copy on physical media or remote access. For any form of Entity, that is to make the Data available to any individual who is not employed by that Entity or engaged as a contractor or agent to perform work on that Entity's behalf. A "Publication" occurs each time you Publish Data.

1.9 "Receive" or "Receives" means to have been given access to Data, locally or remotely.

1.10 "Results" means the outcomes or outputs that You obtain from Your Computational Use of Data. Results shall not include more than a de minimis portion of the Data on which the Computational Use is based.

1.11 "Sui Generis Database Rights" means rights, other than copyright, resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other equivalent rights anywhere in the world.

1.12 "Use" means using Data (including accessing, copying, studying, reviewing, adapting, analyzing, evaluating, or making Computational Use of it), either by machines or humans, or a combination of both.

1.13 "You" or "Your" means any Entity that Receives Data under this Agreement.

Section 2. Right and License to Use and to Publish

2.1 Subject to the conditions set forth in Section 3 of this Agreement, Data Provider(s) hereby grant(s) to You a worldwide, non-exclusive, irrevocable (except as provided in Section 5) right to: (a) Use Data; and (b) Publish Data.

2.2 To the extent that the Data or the coordination, selection or arrangement of Data is protected or protectable under copyright, Sui Generis Database Rights, or other law, Data Provider(s) further agree(s) that such Data or coordination, selection or arrangement is hereby licensed to You and to anyone else who Receives Data under this Agreement for Use and Publication, subject to the conditions set forth in Section 3 of this Agreement.

2.3 Except for these rights and licenses expressly granted, no other intellectual property rights are granted or should be implied.

Section 3. Conditions on Rights Granted

3.1 If You Publish Data You Receive or Enhanced Data:

(a) You may do so under a license of your choice provided that you give anyone who receives the data from you the text of this Agreement, the name of this Agreement and/or a hyperlink or other method reasonably likely to provide a copy of the text of this Agreement; and

(b) You must cause any Data files containing Enhanced Data to carry prominent notices that you have changed those files; and

(c) If You Publish Data You Receive, You must preserve all credit or attribution to the Data Provider(s). Such retained credit or attribution includes any of the following to the extent they exist in the Data as You have Received it: legal notices or metadata; identification of the Data Provider(s); or hyperlinks to Data to the extent it is practical to do so.

3.2 You may provide additional or different license terms and conditions for use, reproduction, or distribution of that Enhanced Data, or for any combination of Data and Enhanced Data as a whole, provided that Your Use and Publication of that combined Data otherwise complies with the conditions stated in this License.

3.3 You and each Data Provider agree that Enhanced Data shall not be considered a work of joint authorship by virtue of its relationship to Data licensed under this Agreement and shall not require either any obligation of accounting to or the consent of any Data Provider.

3.4 This Agreement imposes no obligations or restrictions on Your Use or Publication of Results.

Section 4. Data Provider(s)' Representations

4.1 Each Data Provider represents that the Data Provider has exercised reasonable care, to assure that: (a) the Data it Publishes was created or generated by it or was obtained from others with the right to Publish the Data under this Agreement; and (b) Publication of such Data does not violate any privacy or confidentiality obligation undertaken by the Data Provider.

Section 5. Termination

5.1 All of Your rights under this Agreement will terminate, and Your right to Receive, Use or Publish the Data will be revoked or modified if You materially fail to comply with the terms and conditions of this Agreement and You do not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If your rights under this Agreement terminate, you agree to cease Receipt, Use and Publication of Data. However, your obligations and any rights and permissions granted by you under this Agreement relating to Data that you published prior to such termination will continue and survive.

5.2 If you institute litigation against a Data Provider or anyone else who Receives the Data (including a cross-claim in a lawsuit) based on the Data, other than a claim asserting breach of this Agreement, then any rights previously granted to You to Receive, Use and Publish Data under this Agreement will terminate as of the date such litigation is filed.

Section 6. Disclaimer of Warranties and Limitation of Liability

6.1 EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE DATA (INCLUDING ENHANCED DATA) IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.

6.2 NEITHER YOU NOR ANY DATA PROVIDERS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE DATA OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

Section 7. Miscellaneous

7.1 You agree that it is solely your responsibility to comply with all applicable laws with regard to Your Use or Publication of Data, including any applicable privacy, data protection, security and export laws. You agree to take reasonable steps to assist a Data Provider fulfilling responsibilities to comply with applicable laws with regard to Use or Publication of Data Received hereunder.

7.2 You and Data Provider(s), collectively and individually, waive and/or agree not to assert, to the extent permitted by law, any moral rights you or they hold in Data.

7.3 This Agreement confers no rights or remedies upon any person or entity other than the Parties and their respective heirs, executors, successors and assigns.

7.4 The Data Provider(s) reserve no right or expectation of privacy, data protection or confidentiality in any Data that they Publish under this Agreement. If you choose to Publish Data under this Agreement, you similarly do so with no reservation or expectation of any rights of privacy or confidentiality in that Data.

7.5 The Community Data License Agreement workgroup under The Linux Foundation is the steward of this Agreement ("Steward"). No one other than the Steward has the right to modify or publish new versions of this Agreement. Each version will be given a distinguishing version number. You may Use and Publish Data Received hereunder under the terms of the version of the Agreement under which You originally Received the Data, or under the terms of any subsequent version published by the Steward.

------------- LICENSE FOR M6Doc --------------

Copyright notice: No copyright info provided

License: Attribution-NonCommercial-NoDerivatives 4.0

======= Attribution-NonCommercial-NoDerivatives 4.0 International =======

Creative Commons Corporation ("Creative Commons") is not a law firm and does not provide legal services or legal advice. Distribution of Creative Commons public licenses does not create a lawyer-client or other relationship. Creative Commons makes its licenses and related information available on an "as-is" basis. Creative Commons gives no warranties regarding its licenses, any material licensed under their terms and conditions, or any related information. Creative Commons disclaims all liability for damages resulting from their use to the fullest extent possible.

Using Creative Commons Public Licenses

Creative Commons public licenses provide a standard set of terms and conditions that creators and other rights holders may use to share original works of authorship and other material subject to copyright and certain other rights specified in the public license below. The following considerations are for informational purposes only, are not exhaustive, and do not form part of our licenses.

Considerations for licensors: Our public licenses are intended for use by those authorized to give the public permission to use material in ways otherwise restricted by copyright and certain other rights. Our licenses are irrevocable. Licensors should read and understand the terms and conditions of the license they choose before applying it. Licensors should also secure all rights necessary before applying our licenses so that the public can reuse the material as expected. Licensors should clearly mark any material not subject to the license. This includes other CC-licensed material, or material used under an exception or limitation to copyright. More considerations for licensors: wiki.creativecommons.org/Considerations_for_licensors

Considerations for the public: By using one of our public licenses, a licensor grants the public permission to use the licensed material under specified terms and conditions. If the licensor's permission is not necessary for any reason--for example, because of any applicable exception or limitation to copyright--then that use is not regulated by the license. Our licenses grant only permissions under copyright and certain other rights that a licensor has authority to grant. Use of the licensed material may still be restricted for other reasons, including because others have copyright or other rights in the material. A licensor may make special requests, such as asking that all changes be marked or described. Although not required by our licenses, you are encouraged to respect those requests where reasonable. More considerations for the public: wiki.creativecommons.org/Considerations_for_licensees

Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International Public License

By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions.

Section 1 -- Definitions.

a. Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image.

b. Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights.

c. Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements.

d. Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material.

e. Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License.

f. Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license.

g. Licensor means the individual(s) or entity(ies) granting rights under this Public License.

h. NonCommercial means not primarily intended for or directed towards commercial advantage or monetary compensation. For purposes of this Public License, the exchange of the Licensed Material for other material subject to Copyright and Similar Rights by digital file-sharing or similar means is NonCommercial provided there is no payment of monetary compensation in connection with the exchange.

i. Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them.

j. Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world.

k. You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning.

Section 2 -- Scope.

a. License granted.

1. Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to:

a. reproduce and Share the Licensed Material, in whole or in part, for NonCommercial purposes only; and

b. produce and reproduce, but not Share, Adapted Material for NonCommercial purposes only.

2. Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions.

3. Term. The term of this Public License is specified in Section 6(a).

4. Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material.

5. Downstream recipients.

a. Offer from the Licensor -- Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License.

b. No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material.

6. No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that you are, or that your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i).

b. Other rights.

1. Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise.

2. Patent and trademark rights are not licensed under this Public License.

3. To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties, including when the Licensed Material is used other than for NonCommercial purposes.

Section 3 -- License Conditions.

Your exercise of the Licensed Rights is expressly made subject to the following conditions.

a. Attribution.

1. If You Share the Licensed Material, You must:

a. retain the following if it is supplied by the Licensor with the Licensed Material:

i. identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated);

ii. a copyright notice;

iii. a notice that refers to this Public License;

iv. a notice that refers to the disclaimer of warranties;

v. a URI or hyperlink to the Licensed Material to the extent reasonably practicable;

b. indicate if you modified the Licensed Material and retain an indication of any previous modifications; and

c. indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License. For the avoidance of doubt, you do not have permission under this Public License to Share Adapted Material.

2. You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which you share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information.

3. If requested by the Licensor, you must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable.

Section 4 -- Sui Generis Database Rights.

Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material:

a. for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database for NonCommercial purposes only and provided You do not Share Adapted Material;

b. if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and

c. You must comply with the conditions in Section 3(a) if you share all or a substantial portion of the contents of the database.

For the avoidance of doubt, this Section 4 supplements and does not replace your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights.

Section 5 -- Disclaimer of Warranties and Limitation of Liability.

a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.

b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.

c. The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability.

Section 6 -- Term and Termination.

a. This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically.

b. Where your right to use the Licensed Material has terminated under Section 6(a), it reinstates:

1. automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or

2. upon express reinstatement by the Licensor.

For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License.

c. For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License.

d. Sections 1, 5, 6, 7, and 8 survive termination of this Public License.

Section 7 -- Other Terms and Conditions.

a. The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed.

b. Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License.

Section 8 -- Interpretation.

a. For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License.

b. To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions.

c. No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor.

d. Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority.

Creative Commons is not a party to its public licenses. Notwithstanding, Creative Commons may elect to apply one of its public licenses to material it publishes and in those instances will be considered the "Licensor." The text of the Creative Commons public licenses is dedicated to the public domain under the CC0 Public Domain Dedication. Except for the limited purpose of indicating that material is shared under a Creative Commons public license or as otherwise permitted by the Creative Commons policies published at creativecommons.org/policies, Creative Commons does not authorize the use of the trademark "Creative Commons" or any other trademark or logo of Creative Commons without its prior written consent including, without limitation, in connection with any unauthorized modifications to any of its public licenses or any other arrangements, understandings, or agreements concerning use of licensed material. For the avoidance of doubt, this paragraph does not form part of the public licenses.

Creative Commons may be contacted at creativecommons.org.

------------- LICENSE FOR CDLA --------------

Copyright notice: No copyright info provided

License: No License info provided

------------- LICENSE FOR D4LA --------------

Copyright notice: No copyright info provided

License: No License info provided

README.md CHANGED
@@ -1,3 +1,1315 @@
- ---
- license: mit
-

---
license: mit
library_name: dots_ocr
pipeline_tag: image-text-to-text
tags:
- image-to-text
- ocr
- document-parse
- layout
- table
- formula
- transformers
- custom_code
language:
- en
- zh
- multilingual
---

<div align="center">

<p align="center">
    <img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/logo.png" width="300"/>
</p>

<h1 align="center">
dots.ocr: Multilingual Document Layout Parsing in a Single Vision-Language Model
</h1>

[](https://github.com/rednote-hilab/dots.ocr/blob/master/assets/blog.md)
[](https://huggingface.co/rednote-hilab/dots.ocr)

<div align="center">
<a href="https://dotsocr.xiaohongshu.com" target="_blank" rel="noopener noreferrer"><strong>🖥️ Live Demo</strong></a> |
<a href="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/wechat.jpg" target="_blank" rel="noopener noreferrer"><strong>💬 WeChat</strong></a> |
<a href="https://www.xiaohongshu.com/user/profile/683ffe42000000001d021a4c" target="_blank" rel="noopener noreferrer"><strong>📕 rednote</strong></a>
</div>

</div>

## Introduction

**dots.ocr** is a powerful, multilingual document parser that unifies layout detection and content recognition within a single vision-language model while maintaining good reading order. Despite its compact 1.7B-parameter LLM foundation, it achieves state-of-the-art (SOTA) performance.

1. **Powerful Performance:** **dots.ocr** achieves SOTA performance for text, tables, and reading order on [OmniDocBench](https://github.com/opendatalab/OmniDocBench), while delivering formula recognition results comparable to much larger models such as Doubao-1.5 and Gemini 2.5 Pro.
2. **Multilingual Support:** **dots.ocr** demonstrates robust parsing capabilities for low-resource languages, achieving decisive advantages in both layout detection and content recognition on our in-house multilingual document benchmark.
3. **Unified and Simple Architecture:** By leveraging a single vision-language model, **dots.ocr** offers a significantly more streamlined architecture than conventional methods that rely on complex, multi-model pipelines. Switching between tasks is accomplished simply by altering the input prompt (see the sketch after this list), proving that a VLM can achieve competitive detection results compared to traditional detection models like DocLayout-YOLO.
4. **Efficient and Fast Performance:** Built upon a compact 1.7B LLM, **dots.ocr** provides faster inference speeds than many other high-performing models based on larger foundations.
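
To make point 3 concrete, here is a minimal sketch of prompt-based task switching. It is an illustration rather than official API documentation: it assumes only that `dots_ocr.utils` exposes `dict_promptmode_to_prompt` (the mode-to-prompt mapping imported in the usage example below); the actual mode names are whatever the installed package defines, so they are enumerated rather than hard-coded.

```py
# Minimal sketch: switching tasks is just switching prompts.
# Assumption: dict_promptmode_to_prompt (from dots_ocr.utils, as imported
# in the usage example below) maps task-mode names to prompt strings.
from dots_ocr.utils import dict_promptmode_to_prompt

# List the available task modes and a preview of each prompt.
for mode, prompt in dict_promptmode_to_prompt.items():
    print(f"{mode}: {prompt[:80]}...")

# Picking a task means picking a prompt; the model, processor, and image
# pipeline in the usage example stay exactly the same.
chosen_prompt = next(iter(dict_promptmode_to_prompt.values()))
```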

## Usage with transformers

```py
import torch
from transformers import AutoModelForCausalLM, AutoProcessor, AutoTokenizer
from qwen_vl_utils import process_vision_info
from dots_ocr.utils import dict_promptmode_to_prompt

model_path = "./weights/DotsOCR"
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    attn_implementation="flash_attention_2",
    torch_dtype=torch.bfloat16,
    device_map="auto",
    trust_remote_code=True
)
processor = AutoProcessor.from_pretrained(model_path, trust_remote_code=True)

image_path = "demo/demo_image1.jpg"
prompt = """Please output the layout information from the PDF image, including each layout element's bbox, its category, and the corresponding text content within the bbox.

1. Bbox format: [x1, y1, x2, y2]

2. Layout Categories: The possible categories are ['Caption', 'Footnote', 'Formula', 'List-item', 'Page-footer', 'Page-header', 'Picture', 'Section-header', 'Table', 'Text', 'Title'].

3. Text Extraction & Formatting Rules:
- Picture: For the 'Picture' category, the text field should be omitted.
- Formula: Format its text as LaTeX.
- Table: Format its text as HTML.
- All Others (Text, Title, etc.): Format their text as Markdown.

4. Constraints:
- The output text must be the original text from the image, with no translation.
- All layout elements must be sorted according to human reading order.

5. Final Output: The entire output must be a single JSON object.
"""

messages = [
    {
        "role": "user",
        "content": [
            {
                "type": "image",
                "image": image_path
            },
            {"type": "text", "text": prompt}
        ]
    }
]

# Preparation for inference
text = processor.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True
)
image_inputs, video_inputs = process_vision_info(messages)
inputs = processor(
    text=[text],
    images=image_inputs,
    videos=video_inputs,
    padding=True,
    return_tensors="pt",
)

inputs = inputs.to("cuda")

# Inference: Generation of the output
generated_ids = model.generate(**inputs, max_new_tokens=24000)
generated_ids_trimmed = [
    out_ids[len(in_ids):] for in_ids, out_ids in zip(inputs.input_ids, generated_ids)
]
output_text = processor.batch_decode(
    generated_ids_trimmed, skip_special_tokens=True, clean_up_tokenization_spaces=False
)
print(output_text)
```
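
Since the prompt constrains the final output to a single JSON object, the decoded string can be post-processed directly. The sketch below is a hedged example, not an official helper: the `bbox`, `category`, and `text` field names mirror the specification in the prompt above, and the top-level shape of the object is an assumption, so real outputs should be validated before use.

```py
import json

# output_text is a list with one decoded string per batch element; the
# prompt requests a single JSON object, so parse the first entry.
layout = json.loads(output_text[0])

# Per the prompt spec, each element carries a bbox [x1, y1, x2, y2], a
# category, and (except for 'Picture') a text field. The top-level shape
# is assumed here: handle either a bare list of elements or an object
# wrapping one list.
elements = layout if isinstance(layout, list) else next(iter(layout.values()))
for el in elements:
    x1, y1, x2, y2 = el["bbox"]
    print(f"{el['category']:<15} ({x1}, {y1}, {x2}, {y2}) {el.get('text', '')[:40]}")
```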

### Performance Comparison: dots.ocr vs. Competing Models
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/chart.png" border="0" />

> **Notes:**
> - The EN and ZH metrics are the end-to-end evaluation results of [OmniDocBench](https://github.com/opendatalab/OmniDocBench), and the Multilingual metric is the end-to-end evaluation result of dots.ocr-bench.

## News
* ```2025.07.30``` 🚀 We release [dots.ocr](https://github.com/rednote-hilab/dots.ocr), a multilingual document parsing model built on a 1.7B LLM, with SOTA performance.

## Benchmark Results

### 1. OmniDocBench

#### The end-to-end evaluation results of different tasks.

<table>
|
| 152 |
+
<thead>
|
| 153 |
+
<tr>
|
| 154 |
+
<th rowspan="2"><strong>Model<br>Type</strong></th>
|
| 155 |
+
<th rowspan="2"><strong>Methods</strong></th>
|
| 156 |
+
<th colspan="2"><strong>Overall<sup>Edit</sup>↓</strong></th>
|
| 157 |
+
<th colspan="2"><strong>Text<sup>Edit</sup>↓</strong></th>
|
| 158 |
+
<th colspan="2"><strong>Formula<sup>Edit</sup>↓</strong></th>
|
| 159 |
+
<th colspan="2"><strong>Table<sup>TEDS</sup>↑</strong></th>
|
| 160 |
+
<th colspan="2"><strong>Table<sup>Edit</sup>↓</strong></th>
|
| 161 |
+
<th colspan="2"><strong>Read Order<sup>Edit</sup>↓</strong></th>
|
| 162 |
+
</tr>
|
| 163 |
+
<tr>
|
| 164 |
+
<th><em>EN</em></th>
|
| 165 |
+
<th><em>ZH</em></th>
|
| 166 |
+
<th><em>EN</em></th>
|
| 167 |
+
<th><em>ZH</em></th>
|
| 168 |
+
<th><em>EN</em></th>
|
| 169 |
+
<th><em>ZH</em></th>
|
| 170 |
+
<th><em>EN</em></th>
|
| 171 |
+
<th><em>ZH</em></th>
|
| 172 |
+
<th><em>EN</em></th>
|
| 173 |
+
<th><em>ZH</em></th>
|
| 174 |
+
<th><em>EN</em></th>
|
| 175 |
+
<th><em>ZH</em></th>
|
| 176 |
+
</tr>
|
| 177 |
+
</thead>
|
| 178 |
+
<tbody>
|
| 179 |
+
<tr>
|
| 180 |
+
<td rowspan="8"><strong>Pipeline<br>Tools</strong></td>
|
| 181 |
+
<td>MinerU</td>
|
| 182 |
+
<td>0.150</td>
|
| 183 |
+
<td>0.357</td>
|
| 184 |
+
<td>0.061</td>
|
| 185 |
+
<td>0.215</td>
|
| 186 |
+
<td>0.278</td>
|
| 187 |
+
<td>0.577</td>
|
| 188 |
+
<td>78.6</td>
|
| 189 |
+
<td>62.1</td>
|
| 190 |
+
<td>0.180</td>
|
| 191 |
+
<td>0.344</td>
|
| 192 |
+
<td>0.079</td>
|
| 193 |
+
<td>0.292</td>
|
| 194 |
+
</tr>
|
| 195 |
+
<tr>
|
| 196 |
+
<td>Marker</td>
|
| 197 |
+
<td>0.336</td>
|
| 198 |
+
<td>0.556</td>
|
| 199 |
+
<td>0.080</td>
|
| 200 |
+
<td>0.315</td>
|
| 201 |
+
<td>0.530</td>
|
| 202 |
+
<td>0.883</td>
|
| 203 |
+
<td>67.6</td>
|
| 204 |
+
<td>49.2</td>
|
| 205 |
+
<td>0.619</td>
|
| 206 |
+
<td>0.685</td>
|
| 207 |
+
<td>0.114</td>
|
| 208 |
+
<td>0.340</td>
|
| 209 |
+
</tr>
|
| 210 |
+
<tr>
|
| 211 |
+
<td>Mathpix</td>
|
| 212 |
+
<td>0.191</td>
|
| 213 |
+
<td>0.365</td>
|
| 214 |
+
<td>0.105</td>
|
| 215 |
+
<td>0.384</td>
|
| 216 |
+
<td>0.306</td>
|
| 217 |
+
<td>0.454</td>
|
| 218 |
+
<td>77.0</td>
|
| 219 |
+
<td>67.1</td>
|
| 220 |
+
<td>0.243</td>
|
| 221 |
+
<td>0.320</td>
|
| 222 |
+
<td>0.108</td>
|
| 223 |
+
<td>0.304</td>
|
| 224 |
+
</tr>
|
| 225 |
+
<tr>
|
| 226 |
+
<td>Docling</td>
|
| 227 |
+
<td>0.589</td>
|
| 228 |
+
<td>0.909</td>
|
| 229 |
+
<td>0.416</td>
|
| 230 |
+
<td>0.987</td>
|
| 231 |
+
<td>0.999</td>
|
| 232 |
+
<td>1</td>
|
| 233 |
+
<td>61.3</td>
|
| 234 |
+
<td>25.0</td>
|
| 235 |
+
<td>0.627</td>
|
| 236 |
+
<td>0.810</td>
|
| 237 |
+
<td>0.313</td>
|
| 238 |
+
<td>0.837</td>
|
| 239 |
+
</tr>
|
| 240 |
+
<tr>
|
| 241 |
+
<td>Pix2Text</td>
|
| 242 |
+
<td>0.320</td>
|
| 243 |
+
<td>0.528</td>
|
| 244 |
+
<td>0.138</td>
|
| 245 |
+
<td>0.356</td>
|
| 246 |
+
<td>0.276</td>
|
| 247 |
+
<td>0.611</td>
|
| 248 |
+
<td>73.6</td>
|
| 249 |
+
<td>66.2</td>
|
| 250 |
+
<td>0.584</td>
|
| 251 |
+
<td>0.645</td>
|
| 252 |
+
<td>0.281</td>
|
| 253 |
+
<td>0.499</td>
|
| 254 |
+
</tr>
|
| 255 |
+
<tr>
|
| 256 |
+
<td>Unstructured</td>
|
| 257 |
+
<td>0.586</td>
|
| 258 |
+
<td>0.716</td>
|
| 259 |
+
<td>0.198</td>
|
| 260 |
+
<td>0.481</td>
|
| 261 |
+
<td>0.999</td>
|
| 262 |
+
<td>1</td>
|
| 263 |
+
<td>0</td>
|
| 264 |
+
<td>0.06</td>
|
| 265 |
+
<td>1</td>
|
| 266 |
+
<td>0.998</td>
|
| 267 |
+
<td>0.145</td>
|
| 268 |
+
<td>0.387</td>
|
| 269 |
+
</tr>
|
| 270 |
+
<tr>
|
| 271 |
+
<td>OpenParse</td>
|
| 272 |
+
<td>0.646</td>
|
| 273 |
+
<td>0.814</td>
|
| 274 |
+
<td>0.681</td>
|
| 275 |
+
<td>0.974</td>
|
| 276 |
+
<td>0.996</td>
|
| 277 |
+
<td>1</td>
|
| 278 |
+
<td>64.8</td>
|
| 279 |
+
<td>27.5</td>
|
| 280 |
+
<td>0.284</td>
|
| 281 |
+
<td>0.639</td>
|
| 282 |
+
<td>0.595</td>
|
| 283 |
+
<td>0.641</td>
|
| 284 |
+
</tr>
|
| 285 |
+
<tr>
|
| 286 |
+
<td>PPStruct-V3</td>
|
| 287 |
+
<td>0.145</td>
|
| 288 |
+
<td>0.206</td>
|
| 289 |
+
<td>0.058</td>
|
| 290 |
+
<td>0.088</td>
|
| 291 |
+
<td>0.295</td>
|
| 292 |
+
<td>0.535</td>
|
| 293 |
+
<td>-</td>
|
| 294 |
+
<td>-</td>
|
| 295 |
+
<td>0.159</td>
|
| 296 |
+
<td>0.109</td>
|
| 297 |
+
<td>0.069</td>
|
| 298 |
+
<td>0.091</td>
|
| 299 |
+
</tr>
|
| 300 |
+
<tr>
|
| 301 |
+
<td rowspan="9"><strong>Expert<br>VLMs</strong></td>
|
| 302 |
+
<td>GOT-OCR</td>
|
| 303 |
+
<td>0.287</td>
|
| 304 |
+
<td>0.411</td>
|
| 305 |
+
<td>0.189</td>
|
| 306 |
+
<td>0.315</td>
|
| 307 |
+
<td>0.360</td>
|
| 308 |
+
<td>0.528</td>
|
| 309 |
+
<td>53.2</td>
|
| 310 |
+
<td>47.2</td>
|
| 311 |
+
<td>0.459</td>
|
| 312 |
+
<td>0.520</td>
|
| 313 |
+
<td>0.141</td>
|
| 314 |
+
<td>0.280</td>
|
| 315 |
+
</tr>
|
| 316 |
+
<tr>
|
| 317 |
+
<td>Nougat</td>
|
| 318 |
+
<td>0.452</td>
|
| 319 |
+
<td>0.973</td>
|
| 320 |
+
<td>0.365</td>
|
| 321 |
+
<td>0.998</td>
|
| 322 |
+
<td>0.488</td>
|
| 323 |
+
<td>0.941</td>
|
| 324 |
+
<td>39.9</td>
|
| 325 |
+
<td>0</td>
|
| 326 |
+
<td>0.572</td>
|
| 327 |
+
<td>1.000</td>
|
| 328 |
+
<td>0.382</td>
|
| 329 |
+
<td>0.954</td>
|
| 330 |
+
</tr>
|
| 331 |
+
<tr>
|
| 332 |
+
<td>Mistral OCR</td>
|
| 333 |
+
<td>0.268</td>
|
| 334 |
+
<td>0.439</td>
|
| 335 |
+
<td>0.072</td>
|
| 336 |
+
<td>0.325</td>
|
| 337 |
+
<td>0.318</td>
|
| 338 |
+
<td>0.495</td>
|
| 339 |
+
<td>75.8</td>
|
| 340 |
+
<td>63.6</td>
|
| 341 |
+
<td>0.600</td>
|
| 342 |
+
<td>0.650</td>
|
| 343 |
+
<td>0.083</td>
|
| 344 |
+
<td>0.284</td>
|
| 345 |
+
</tr>
|
| 346 |
+
<tr>
|
| 347 |
+
<td>OLMOCR-sglang</td>
|
| 348 |
+
<td>0.326</td>
|
| 349 |
+
<td>0.469</td>
|
| 350 |
+
<td>0.097</td>
|
| 351 |
+
<td>0.293</td>
|
| 352 |
+
<td>0.455</td>
|
| 353 |
+
<td>0.655</td>
|
| 354 |
+
<td>68.1</td>
|
| 355 |
+
<td>61.3</td>
|
| 356 |
+
<td>0.608</td>
|
| 357 |
+
<td>0.652</td>
|
| 358 |
+
<td>0.145</td>
|
| 359 |
+
<td>0.277</td>
|
| 360 |
+
</tr>
|
| 361 |
+
<tr>
|
| 362 |
+
<td>SmolDocling-256M</td>
|
| 363 |
+
<td>0.493</td>
|
| 364 |
+
<td>0.816</td>
|
| 365 |
+
<td>0.262</td>
|
| 366 |
+
<td>0.838</td>
|
| 367 |
+
<td>0.753</td>
|
| 368 |
+
<td>0.997</td>
|
| 369 |
+
<td>44.9</td>
|
| 370 |
+
<td>16.5</td>
|
| 371 |
+
<td>0.729</td>
|
| 372 |
+
<td>0.907</td>
|
| 373 |
+
<td>0.227</td>
|
| 374 |
+
<td>0.522</td>
|
| 375 |
+
</tr>
|
| 376 |
+
<tr>
|
| 377 |
+
<td>Dolphin</td>
|
| 378 |
+
<td>0.206</td>
|
| 379 |
+
<td>0.306</td>
|
| 380 |
+
<td>0.107</td>
|
| 381 |
+
<td>0.197</td>
|
| 382 |
+
<td>0.447</td>
|
| 383 |
+
<td>0.580</td>
|
| 384 |
+
<td>77.3</td>
|
| 385 |
+
<td>67.2</td>
|
| 386 |
+
<td>0.180</td>
|
| 387 |
+
<td>0.285</td>
|
| 388 |
+
<td>0.091</td>
|
| 389 |
+
<td>0.162</td>
|
| 390 |
+
</tr>
|
| 391 |
+
<tr>
|
| 392 |
+
<td>MinerU 2</td>
|
| 393 |
+
<td>0.139</td>
|
| 394 |
+
<td>0.240</td>
|
| 395 |
+
<td>0.047</td>
|
| 396 |
+
<td>0.109</td>
|
| 397 |
+
<td>0.297</td>
|
| 398 |
+
<td>0.536</td>
|
| 399 |
+
<td>82.5</td>
|
| 400 |
+
<td>79.0</td>
|
| 401 |
+
<td>0.141</td>
|
| 402 |
+
<td>0.195</td>
|
| 403 |
+
<td>0.069</td>
|
| 404 |
+
<td>0.118</td>
|
| 405 |
+
</tr>
|
| 406 |
+
<tr>
|
| 407 |
+
<td>OCRFlux</td>
|
| 408 |
+
<td>0.195</td>
|
| 409 |
+
<td>0.281</td>
|
| 410 |
+
<td>0.064</td>
|
| 411 |
+
<td>0.183</td>
|
| 412 |
+
<td>0.379</td>
|
| 413 |
+
<td>0.613</td>
|
| 414 |
+
<td>71.6</td>
|
| 415 |
+
<td>81.3</td>
|
| 416 |
+
<td>0.253</td>
|
| 417 |
+
<td>0.139</td>
|
| 418 |
+
<td>0.086</td>
|
| 419 |
+
<td>0.187</td>
|
| 420 |
+
</tr>
|
| 421 |
+
<tr>
|
| 422 |
+
<td>MonkeyOCR-pro-3B</td>
|
| 423 |
+
<td>0.138</td>
|
| 424 |
+
<td>0.206</td>
|
| 425 |
+
<td>0.067</td>
|
| 426 |
+
<td>0.107</td>
|
| 427 |
+
<td><strong>0.246</strong></td>
|
| 428 |
+
<td>0.421</td>
|
| 429 |
+
<td>81.5</td>
|
| 430 |
+
<td>87.5</td>
|
| 431 |
+
<td>0.139</td>
|
| 432 |
+
<td>0.111</td>
|
| 433 |
+
<td>0.100</td>
|
| 434 |
+
<td>0.185</td>
|
| 435 |
+
</tr>
|
| 436 |
+
<tr>
|
| 437 |
+
|
| 438 |
+
<td rowspan="5"><strong>General<br>VLMs</strong></td>
|
| 439 |
+
<td>GPT4o</td>
|
| 440 |
+
<td>0.233</td>
|
| 441 |
+
<td>0.399</td>
|
| 442 |
+
<td>0.144</td>
|
| 443 |
+
<td>0.409</td>
|
| 444 |
+
<td>0.425</td>
|
| 445 |
+
<td>0.606</td>
|
| 446 |
+
<td>72.0</td>
|
| 447 |
+
<td>62.9</td>
|
| 448 |
+
<td>0.234</td>
|
| 449 |
+
<td>0.329</td>
|
| 450 |
+
<td>0.128</td>
|
| 451 |
+
<td>0.251</td>
|
| 452 |
+
</tr>
|
| 453 |
+
<tr>
|
| 454 |
+
<td>Qwen2-VL-72B</td>
|
| 455 |
+
<td>0.252</td>
|
| 456 |
+
<td>0.327</td>
|
| 457 |
+
<td>0.096</td>
|
| 458 |
+
<td>0.218</td>
|
| 459 |
+
<td>0.404</td>
|
| 460 |
+
<td>0.487</td>
|
| 461 |
+
<td>76.8</td>
|
| 462 |
+
<td>76.4</td>
|
| 463 |
+
<td>0.387</td>
|
| 464 |
+
<td>0.408</td>
|
| 465 |
+
<td>0.119</td>
|
| 466 |
+
<td>0.193</td>
|
| 467 |
+
</tr>
|
| 468 |
+
<tr>
|
| 469 |
+
<td>Qwen2.5-VL-72B</td>
|
| 470 |
+
<td>0.214</td>
|
| 471 |
+
<td>0.261</td>
|
| 472 |
+
<td>0.092</td>
|
| 473 |
+
<td>0.18</td>
|
| 474 |
+
<td>0.315</td>
|
| 475 |
+
<td>0.434</td>
|
| 476 |
+
<td>82.9</td>
|
| 477 |
+
<td>83.9</td>
|
| 478 |
+
<td>0.341</td>
|
| 479 |
+
<td>0.262</td>
|
| 480 |
+
<td>0.106</td>
|
| 481 |
+
<td>0.168</td>
|
| 482 |
+
</tr>
|
| 483 |
+
<tr>
|
| 484 |
+
<td>Gemini2.5-Pro</td>
|
| 485 |
+
<td>0.148</td>
|
| 486 |
+
<td>0.212</td>
|
| 487 |
+
<td>0.055</td>
|
| 488 |
+
<td>0.168</td>
|
| 489 |
+
<td>0.356</td>
|
| 490 |
+
<td>0.439</td>
|
| 491 |
+
<td>85.8</td>
|
| 492 |
+
<td>86.4</td>
|
| 493 |
+
<td>0.13</td>
|
| 494 |
+
<td>0.119</td>
|
| 495 |
+
<td>0.049</td>
|
| 496 |
+
<td>0.121</td>
|
| 497 |
+
</tr>
|
| 498 |
+
<tr>
|
| 499 |
+
<td>doubao-1-5-thinking-vision-pro-250428</td>
|
| 500 |
+
<td>0.140</td>
|
| 501 |
+
<td>0.162</td>
|
| 502 |
+
<td>0.043</td>
|
| 503 |
+
<td>0.085</td>
|
| 504 |
+
<td>0.295</td>
|
| 505 |
+
<td><strong>0.384</strong></td>
|
| 506 |
+
<td>83.3</td>
|
| 507 |
+
<td><strong>89.3</strong></td>
|
| 508 |
+
<td>0.165</td>
|
| 509 |
+
<td><strong>0.085</strong></td>
|
| 510 |
+
<td>0.058</td>
|
| 511 |
+
<td>0.094</td>
|
| 512 |
+
</tr>
|
| 513 |
+
<tr>
|
| 514 |
+
<td rowspan="1"><strong>Expert VLMs</strong></td>
|
| 515 |
+
<td><strong>dots.ocr</strong></td>
|
| 516 |
+
<td><strong>0.125</strong></td>
|
| 517 |
+
<td><strong>0.160</strong></td>
|
| 518 |
+
<td><strong>0.032</strong></td>
|
| 519 |
+
<td><strong>0.066</strong></td>
|
| 520 |
+
<td>0.329</td>
|
| 521 |
+
<td>0.416</td>
|
| 522 |
+
<td><strong>88.6</strong></td>
|
| 523 |
+
<td>89.0</td>
|
| 524 |
+
<td><strong>0.099</strong></td>
|
| 525 |
+
<td>0.092</td>
|
| 526 |
+
<td><strong>0.040</strong></td>
|
| 527 |
+
<td><strong>0.067</strong></td>
|
| 528 |
+
</tr>
|
| 529 |
+
|
| 530 |
+
</tbody>
|
| 531 |
+
</table>
|
| 532 |
+
|
| 533 |
+
|
| 534 |
+
#### The end-to-end text recognition performance across 9 PDF page types.
|
| 535 |
+
|
| 536 |
+
<table>
|
| 537 |
+
<thead>
|
| 538 |
+
<tr>
|
| 539 |
+
<th><strong>Model<br>Type</strong></th>
|
| 540 |
+
<th><strong>Models</strong></th>
|
| 541 |
+
<th><strong>Book</strong></th>
|
| 542 |
+
<th><strong>Slides</strong></th>
|
| 543 |
+
<th><strong>Financial<br>Report</strong></th>
|
| 544 |
+
<th><strong>Textbook</strong></th>
|
| 545 |
+
<th><strong>Exam<br>Paper</strong></th>
|
| 546 |
+
<th><strong>Magazine</strong></th>
|
| 547 |
+
<th><strong>Academic<br>Papers</strong></th>
|
| 548 |
+
<th><strong>Notes</strong></th>
|
| 549 |
+
<th><strong>Newspaper</strong></th>
|
| 550 |
+
<th><strong>Overall</strong></th>
|
| 551 |
+
</tr>
|
| 552 |
+
</thead>
|
| 553 |
+
<tbody>
|
| 554 |
+
<tr>
|
| 555 |
+
<td rowspan="3"><strong>Pipeline<br>Tools</strong></td>
|
| 556 |
+
<td>MinerU</td>
|
| 557 |
+
<td>0.055</td>
|
| 558 |
+
<td>0.124</td>
|
| 559 |
+
<td><u>0.033</u></td>
|
| 560 |
+
<td>0.102</td>
|
| 561 |
+
<td>0.159</td>
|
| 562 |
+
<td><strong>0.072</strong></td>
|
| 563 |
+
<td><u>0.025</u></td>
|
| 564 |
+
<td>0.984</td>
|
| 565 |
+
<td>0.171</td>
|
| 566 |
+
<td>0.206</td>
|
| 567 |
+
</tr>
|
| 568 |
+
<tr>
|
| 569 |
+
<td>Marker</td>
|
| 570 |
+
<td>0.074</td>
|
| 571 |
+
<td>0.340</td>
|
| 572 |
+
<td>0.089</td>
|
| 573 |
+
<td>0.319</td>
|
| 574 |
+
<td>0.452</td>
|
| 575 |
+
<td>0.153</td>
|
| 576 |
+
<td>0.059</td>
|
| 577 |
+
<td>0.651</td>
|
| 578 |
+
<td>0.192</td>
|
| 579 |
+
<td>0.274</td>
|
| 580 |
+
</tr>
|
| 581 |
+
<tr>
|
| 582 |
+
<td>Mathpix</td>
|
| 583 |
+
<td>0.131</td>
|
| 584 |
+
<td>0.220</td>
|
| 585 |
+
<td>0.202</td>
|
| 586 |
+
<td>0.216</td>
|
| 587 |
+
<td>0.278</td>
|
| 588 |
+
<td>0.147</td>
|
| 589 |
+
<td>0.091</td>
|
| 590 |
+
<td>0.634</td>
|
| 591 |
+
<td>0.690</td>
|
| 592 |
+
<td>0.300</td>
|
| 593 |
+
</tr>
|
| 594 |
+
<tr>
|
| 595 |
+
<td rowspan="5"><strong>Expert<br>VLMs</strong></td>
|
| 596 |
+
<td>GOT-OCR</td>
|
| 597 |
+
<td>0.111</td>
|
| 598 |
+
<td>0.222</td>
|
| 599 |
+
<td>0.067</td>
|
| 600 |
+
<td>0.132</td>
|
| 601 |
+
<td>0.204</td>
|
| 602 |
+
<td>0.198</td>
|
| 603 |
+
<td>0.179</td>
|
| 604 |
+
<td>0.388</td>
|
| 605 |
+
<td>0.771</td>
|
| 606 |
+
<td>0.267</td>
|
| 607 |
+
</tr>
|
| 608 |
+
<tr>
|
| 609 |
+
<td>Nougat</td>
|
| 610 |
+
<td>0.734</td>
|
| 611 |
+
<td>0.958</td>
|
| 612 |
+
<td>1.000</td>
|
| 613 |
+
<td>0.820</td>
|
| 614 |
+
<td>0.930</td>
|
| 615 |
+
<td>0.830</td>
|
| 616 |
+
<td>0.214</td>
|
| 617 |
+
<td>0.991</td>
|
| 618 |
+
<td>0.871</td>
|
| 619 |
+
<td>0.806</td>
|
| 620 |
+
</tr>
|
| 621 |
+
<tr>
|
| 622 |
+
<td>Dolphin</td>
|
| 623 |
+
<td>0.091</td>
|
| 624 |
+
<td>0.131</td>
|
| 625 |
+
<td>0.057</td>
|
| 626 |
+
<td>0.146</td>
|
| 627 |
+
<td>0.231</td>
|
| 628 |
+
<td>0.121</td>
|
| 629 |
+
<td>0.074</td>
|
| 630 |
+
<td>0.363</td>
|
| 631 |
+
<td>0.307</td>
|
| 632 |
+
<td>0.177</td>
|
| 633 |
+
</tr>
|
| 634 |
+
<tr>
|
| 635 |
+
<td>OCRFlux</td>
|
| 636 |
+
<td>0.068</td>
|
| 637 |
+
<td>0.125</td>
|
| 638 |
+
<td>0.092</td>
|
| 639 |
+
<td>0.102</td>
|
| 640 |
+
<td>0.119</td>
|
| 641 |
+
<td>0.083</td>
|
| 642 |
+
<td>0.047</td>
|
| 643 |
+
<td>0.223</td>
|
| 644 |
+
<td>0.536</td>
|
| 645 |
+
<td>0.149</td>
|
| 646 |
+
</tr>
|
| 647 |
+
<tr>
|
| 648 |
+
<td>MonkeyOCR-pro-3B</td>
|
| 649 |
+
<td>0.084</td>
|
| 650 |
+
<td>0.129</td>
|
| 651 |
+
<td>0.060</td>
|
| 652 |
+
<td>0.090</td>
|
| 653 |
+
<td>0.107</td>
|
| 654 |
+
<td>0.073</td>
|
| 655 |
+
<td>0.050</td>
|
| 656 |
+
<td>0.171</td>
|
| 657 |
+
<td>0.107</td>
|
| 658 |
+
<td>0.100</td>
|
| 659 |
+
</tr>
|
| 660 |
+
<tr>
|
| 661 |
+
<td rowspan="4"><strong>General<br>VLMs</strong></td>
|
| 662 |
+
<td>GPT4o</td>
|
| 663 |
+
<td>0.157</td>
|
| 664 |
+
<td>0.163</td>
|
| 665 |
+
<td>0.348</td>
|
| 666 |
+
<td>0.187</td>
|
| 667 |
+
<td>0.281</td>
|
| 668 |
+
<td>0.173</td>
|
| 669 |
+
<td>0.146</td>
|
| 670 |
+
<td>0.607</td>
|
| 671 |
+
<td>0.751</td>
|
| 672 |
+
<td>0.316</td>
|
| 673 |
+
</tr>
|
| 674 |
+
<tr>
|
| 675 |
+
<td>Qwen2.5-VL-7B</td>
|
| 676 |
+
<td>0.148</td>
|
| 677 |
+
<td>0.053</td>
|
| 678 |
+
<td>0.111</td>
|
| 679 |
+
<td>0.137</td>
|
| 680 |
+
<td>0.189</td>
|
| 681 |
+
<td>0.117</td>
|
| 682 |
+
<td>0.134</td>
|
| 683 |
+
<td>0.204</td>
|
| 684 |
+
<td>0.706</td>
|
| 685 |
+
<td>0.205</td>
|
| 686 |
+
</tr>
|
| 687 |
+
<tr>
|
| 688 |
+
<td>InternVL3-8B</td>
|
| 689 |
+
<td>0.163</td>
|
| 690 |
+
<td>0.056</td>
|
| 691 |
+
<td>0.107</td>
|
| 692 |
+
<td>0.109</td>
|
| 693 |
+
<td>0.129</td>
|
| 694 |
+
<td>0.100</td>
|
| 695 |
+
<td>0.159</td>
|
| 696 |
+
<td>0.150</td>
|
| 697 |
+
<td>0.681</td>
|
| 698 |
+
<td>0.188</td>
|
| 699 |
+
</tr>
|
| 700 |
+
<tr>
|
| 701 |
+
<td>doubao-1-5-thinking-vision-pro-250428</td>
|
| 702 |
+
<td>0.048</td>
|
| 703 |
+
<td>0.048</td>
|
| 704 |
+
<td>0.024</td>
|
| 705 |
+
<td><strong>0.062</strong></td>
|
| 706 |
+
<td>0.085</td>
|
| 707 |
+
<td>0.051</td>
|
| 708 |
+
<td>0.039</td>
|
| 709 |
+
<td><strong>0.096</strong></td>
|
| 710 |
+
<td>0.181</td>
|
| 711 |
+
<td>0.073</td>
|
| 712 |
+
</tr>
|
| 713 |
+
<tr>
|
| 714 |
+
<td rowspan="1"><strong>Expert VLMs</strong></td>
|
| 715 |
+
<td><strong>dots.ocr</strong></td>
|
| 716 |
+
<td><strong>0.031</strong></td>
|
| 717 |
+
<td><strong>0.047</strong></td>
|
| 718 |
+
<td><strong>0.011</strong></td>
|
| 719 |
+
<td>0.082</td>
|
| 720 |
+
<td><strong>0.079</strong></td>
|
| 721 |
+
<td><strong>0.028</strong></td>
|
| 722 |
+
<td><strong>0.029</strong></td>
|
| 723 |
+
<td>0.109</td>
|
| 724 |
+
<td><strong>0.056</strong></td>
|
| 725 |
+
<td><strong>0.055</strong></td>
|
| 726 |
+
</tr>
|
| 727 |
+
|
| 728 |
+
</tbody>
|
| 729 |
+
</table>
|
| 730 |
+
|
| 731 |
+
> **Notes:**
|
| 732 |
+
> - The metrics are from [MonkeyOCR](https://github.com/Yuliang-Liu/MonkeyOCR), [OmniDocBench](https://github.com/opendatalab/OmniDocBench), and our own internal evaluations.
|
| 733 |
+
> - We delete the Page-header and Page-footer cells in the result markdown.
|
| 734 |
+
> - We use the tikz_preprocess pipeline to upsample the images to 200 DPI.
|
| 735 |
+
|
| 736 |
+
|
| 737 |
+
### 2. **dots.ocr-bench**
|
| 738 |
+
|
| 739 |
+
This is an in-house benchmark containing 1,493 PDF images covering 100 languages.
|
| 740 |
+
|
| 741 |
+
#### The end-to-end evaluation results of different tasks.
|
| 742 |
+
|
| 743 |
+
<table>
|
| 744 |
+
<thead>
|
| 745 |
+
<tr>
|
| 746 |
+
<th rowspan="1"><strong>Methods</strong></th>
|
| 747 |
+
<th colspan="1"><strong>Overall<sup>Edit</sup>↓</strong></th>
|
| 748 |
+
<th colspan="1"><strong>Text<sup>Edit</sup>↓</strong></th>
|
| 749 |
+
<th colspan="1"><strong>Formula<sup>Edit</sup>↓</strong></th>
|
| 750 |
+
<th colspan="1"><strong>Table<sup>TEDS</sup>↑</strong></th>
|
| 751 |
+
<th colspan="1"><strong>Table<sup>Edit</sup>↓</strong></th>
|
| 752 |
+
<th colspan="1"><strong>Read Order<sup>Edit</sup>↓</strong></th>
|
| 753 |
+
</tr>
|
| 754 |
+
</thead>
|
| 755 |
+
<tbody>
|
| 756 |
+
<tr>
<td>MonkeyOCR-3B</td>
|
| 757 |
+
<td>0.483</td>
|
| 758 |
+
<td>0.445</td>
|
| 759 |
+
<td>0.627</td>
|
| 760 |
+
<td>50.93</td>
|
| 761 |
+
<td>0.452</td>
|
| 762 |
+
<td>0.409</td>
|
| 763 |
+
</tr>
|
| 764 |
+
<tr>
|
| 765 |
+
<td>doubao-1-5-thinking-vision-pro-250428</td>
|
| 766 |
+
<td>0.291</td>
|
| 767 |
+
<td>0.226</td>
|
| 768 |
+
<td>0.440</td>
|
| 769 |
+
<td>71.2</td>
|
| 770 |
+
<td>0.260</td>
|
| 771 |
+
<td>0.238</td>
|
| 772 |
+
</tr>
|
| 773 |
+
<tr>
|
| 774 |
+
<td>doubao-1-6</td>
|
| 775 |
+
<td>0.299</td>
|
| 776 |
+
<td>0.270</td>
|
| 777 |
+
<td>0.417</td>
|
| 778 |
+
<td>71.0</td>
|
| 779 |
+
<td>0.258</td>
|
| 780 |
+
<td>0.253</td>
|
| 781 |
+
</tr>
|
| 782 |
+
<tr>
|
| 783 |
+
<td>Gemini2.5-Pro</td>
|
| 784 |
+
<td>0.251</td>
|
| 785 |
+
<td>0.163</td>
|
| 786 |
+
<td>0.402</td>
|
| 787 |
+
<td>77.1</td>
|
| 788 |
+
<td>0.236</td>
|
| 789 |
+
<td>0.202</td>
|
| 790 |
+
</tr>
|
| 791 |
+
<tr>
|
| 792 |
+
<td><strong>dots.ocr</strong> </td>
|
| 793 |
+
<td><strong>0.177</strong></td>
|
| 794 |
+
<td><strong>0.075</strong></td>
|
| 795 |
+
<td><strong>0.297</strong></td>
|
| 796 |
+
<td><strong>79.2</strong></td>
|
| 797 |
+
<td><strong>0.186</strong></td>
|
| 798 |
+
<td><strong>0.152</strong></td>
|
| 799 |
+
</tr>
|
| 800 |
+
|
| 801 |
+
</tbody>
|
| 802 |
+
</table>
|
| 803 |
+
|
| 804 |
+
> **Notes:**
|
| 805 |
+
> - We use the same metric calculation pipeline of [OmniDocBench](https://github.com/opendatalab/OmniDocBench).
|
| 806 |
+
> - We delete the Page-header and Page-footer cells in the result markdown.
|
| 807 |
+
|
| 808 |
+
#### Layout Detection
|
| 809 |
+
|
| 810 |
+
<table>
|
| 811 |
+
<thead>
|
| 812 |
+
<tr>
|
| 813 |
+
<th rowspan="2"><strong>Method</strong></th>
|
| 814 |
+
<th colspan="5" style="text-align: center;"><strong>F1@IoU=.50:.05:.95↑</strong></th>
|
| 815 |
+
<th colspan="5" style="text-align: center;"><strong>F1@IoU=.50↑</strong></th>
|
| 816 |
+
</tr>
|
| 817 |
+
<tr>
|
| 818 |
+
<th>Overall</th>
|
| 819 |
+
<th>Text</th>
|
| 820 |
+
<th>Formula</th>
|
| 821 |
+
<th>Table</th>
|
| 822 |
+
<th>Picture</th>
|
| 823 |
+
<th>Overall</th>
|
| 824 |
+
<th>Text</th>
|
| 825 |
+
<th>Formula</th>
|
| 826 |
+
<th>Table</th>
|
| 827 |
+
<th>Picture</th>
|
| 828 |
+
</tr>
|
| 829 |
+
</thead>
|
| 830 |
+
|
| 831 |
+
<tbody>
|
| 832 |
+
<tr>
<td>DocLayout-YOLO-DocStructBench</td>
|
| 833 |
+
<td>0.733</td>
|
| 834 |
+
<td>0.694</td>
|
| 835 |
+
<td>0.480</td>
|
| 836 |
+
<td>0.803</td>
|
| 837 |
+
<td>0.619</td>
|
| 838 |
+
<td>0.806</td>
|
| 839 |
+
<td>0.779</td>
|
| 840 |
+
<td>0.620</td>
|
| 841 |
+
<td>0.858</td>
|
| 842 |
+
<td>0.678</td>
|
| 843 |
+
</tr>
|
| 844 |
+
|
| 845 |
+
<tr>
|
| 846 |
+
<td>dots.ocr-parse all</td>
|
| 847 |
+
<td>0.831</td>
|
| 848 |
+
<td>0.801</td>
|
| 849 |
+
<td>0.654</td>
|
| 850 |
+
<td>0.838</td>
|
| 851 |
+
<td>0.748</td>
|
| 852 |
+
<td>0.922</td>
|
| 853 |
+
<td>0.909</td>
|
| 854 |
+
<td>0.770</td>
|
| 855 |
+
<td>0.888</td>
|
| 856 |
+
<td>0.831</td>
|
| 857 |
+
</tr>
|
| 858 |
+
|
| 859 |
+
<tr>
|
| 860 |
+
<td> <strong>dots.ocr-detection only</strong> </td>
|
| 861 |
+
<td><strong>0.845</strong></td>
|
| 862 |
+
<td><strong>0.816</strong></td>
|
| 863 |
+
<td><strong>0.716</strong></td>
|
| 864 |
+
<td><strong>0.875</strong></td>
|
| 865 |
+
<td><strong>0.765</strong></td>
|
| 866 |
+
<td><strong>0.930</strong></td>
|
| 867 |
+
<td><strong>0.917</strong></td>
|
| 868 |
+
<td><strong>0.832</strong></td>
|
| 869 |
+
<td><strong>0.918</strong></td>
|
| 870 |
+
<td><strong>0.843</strong></td>
|
| 871 |
+
</tr>
|
| 872 |
+
|
| 873 |
+
</tbody>
|
| 874 |
+
</table>
|
| 875 |
+
|
| 876 |
+
> **Notes:**
|
| 877 |
+
> - We use `prompt_layout_all_en` for **parse all** and `prompt_layout_only_en` for **detection only**; please refer to [prompts](https://github.com/rednote-hilab/dots.ocr/blob/master/dots_ocr/utils/prompts.py).
|
| 878 |
+
|
| 879 |
+
|
| 880 |
+
### 3. **olmOCR-bench**
|
| 881 |
+
|
| 882 |
+
<table>
|
| 883 |
+
<thead>
|
| 884 |
+
<tr>
|
| 885 |
+
<th>Model</th>
|
| 886 |
+
<th>ArXiv</th>
|
| 887 |
+
<th>Old Scans<br>Math</th>
|
| 888 |
+
<th>Tables</th>
|
| 889 |
+
<th>Old Scans</th>
|
| 890 |
+
<th>Headers and<br>Footers</th>
|
| 891 |
+
<th>Multi<br>column</th>
|
| 892 |
+
<th>Long Tiny<br>Text</th>
|
| 893 |
+
<th>Base</th>
|
| 894 |
+
<th>Overall</th>
|
| 895 |
+
</tr>
|
| 896 |
+
</thead>
|
| 897 |
+
<tbody>
|
| 898 |
+
<tr>
|
| 899 |
+
<td>GOT OCR</td>
|
| 900 |
+
<td>52.7</td>
|
| 901 |
+
<td>52.0</td>
|
| 902 |
+
<td>0.2</td>
|
| 903 |
+
<td>22.1</td>
|
| 904 |
+
<td>93.6</td>
|
| 905 |
+
<td>42.0</td>
|
| 906 |
+
<td>29.9</td>
|
| 907 |
+
<td>94.0</td>
|
| 908 |
+
<td>48.3 ± 1.1</td>
|
| 909 |
+
</tr>
|
| 910 |
+
<tr>
|
| 911 |
+
<td>Marker</td>
|
| 912 |
+
<td>76.0</td>
|
| 913 |
+
<td>57.9</td>
|
| 914 |
+
<td>57.6</td>
|
| 915 |
+
<td>27.8</td>
|
| 916 |
+
<td>84.9</td>
|
| 917 |
+
<td>72.9</td>
|
| 918 |
+
<td>84.6</td>
|
| 919 |
+
<td>99.1</td>
|
| 920 |
+
<td>70.1 ± 1.1</td>
|
| 921 |
+
</tr>
|
| 922 |
+
<tr>
|
| 923 |
+
<td>MinerU</td>
|
| 924 |
+
<td>75.4</td>
|
| 925 |
+
<td>47.4</td>
|
| 926 |
+
<td>60.9</td>
|
| 927 |
+
<td>17.3</td>
|
| 928 |
+
<td><strong>96.6</strong></td>
|
| 929 |
+
<td>59.0</td>
|
| 930 |
+
<td>39.1</td>
|
| 931 |
+
<td>96.6</td>
|
| 932 |
+
<td>61.5 ± 1.1</td>
|
| 933 |
+
</tr>
|
| 934 |
+
<tr>
|
| 935 |
+
<td>Mistral OCR</td>
|
| 936 |
+
<td>77.2</td>
|
| 937 |
+
<td>67.5</td>
|
| 938 |
+
<td>60.6</td>
|
| 939 |
+
<td>29.3</td>
|
| 940 |
+
<td>93.6</td>
|
| 941 |
+
<td>71.3</td>
|
| 942 |
+
<td>77.1</td>
|
| 943 |
+
<td>99.4</td>
|
| 944 |
+
<td>72.0 ± 1.1</td>
|
| 945 |
+
</tr>
|
| 946 |
+
<tr>
|
| 947 |
+
<td>Nanonets OCR</td>
|
| 948 |
+
<td>67.0</td>
|
| 949 |
+
<td>68.6</td>
|
| 950 |
+
<td>77.7</td>
|
| 951 |
+
<td>39.5</td>
|
| 952 |
+
<td>40.7</td>
|
| 953 |
+
<td>69.9</td>
|
| 954 |
+
<td>53.4</td>
|
| 955 |
+
<td>99.3</td>
|
| 956 |
+
<td>64.5 ± 1.1</td>
|
| 957 |
+
</tr>
|
| 958 |
+
<tr>
|
| 959 |
+
<td>GPT-4o<br>(No Anchor)</td>
|
| 960 |
+
<td>51.5</td>
|
| 961 |
+
<td><strong>75.5</strong></td>
|
| 962 |
+
<td>69.1</td>
|
| 963 |
+
<td>40.9</td>
|
| 964 |
+
<td>94.2</td>
|
| 965 |
+
<td>68.9</td>
|
| 966 |
+
<td>54.1</td>
|
| 967 |
+
<td>96.7</td>
|
| 968 |
+
<td>68.9 ± 1.1</td>
|
| 969 |
+
</tr>
|
| 970 |
+
<tr>
|
| 971 |
+
<td>GPT-4o<br>(Anchored)</td>
|
| 972 |
+
<td>53.5</td>
|
| 973 |
+
<td>74.5</td>
|
| 974 |
+
<td>70.0</td>
|
| 975 |
+
<td>40.7</td>
|
| 976 |
+
<td>93.8</td>
|
| 977 |
+
<td>69.3</td>
|
| 978 |
+
<td>60.6</td>
|
| 979 |
+
<td>96.8</td>
|
| 980 |
+
<td>69.9 ± 1.1</td>
|
| 981 |
+
</tr>
|
| 982 |
+
<tr>
|
| 983 |
+
<td>Gemini Flash 2<br>(No Anchor)</td>
|
| 984 |
+
<td>32.1</td>
|
| 985 |
+
<td>56.3</td>
|
| 986 |
+
<td>61.4</td>
|
| 987 |
+
<td>27.8</td>
|
| 988 |
+
<td>48.0</td>
|
| 989 |
+
<td>58.7</td>
|
| 990 |
+
<td><strong>84.4</strong></td>
|
| 991 |
+
<td>94.0</td>
|
| 992 |
+
<td>57.8 ± 1.1</td>
|
| 993 |
+
</tr>
|
| 994 |
+
<tr>
|
| 995 |
+
<td>Gemini Flash 2<br>(Anchored)</td>
|
| 996 |
+
<td>54.5</td>
|
| 997 |
+
<td>56.1</td>
|
| 998 |
+
<td>72.1</td>
|
| 999 |
+
<td>34.2</td>
|
| 1000 |
+
<td>64.7</td>
|
| 1001 |
+
<td>61.5</td>
|
| 1002 |
+
<td>71.5</td>
|
| 1003 |
+
<td>95.6</td>
|
| 1004 |
+
<td>63.8 ± 1.2</td>
|
| 1005 |
+
</tr>
|
| 1006 |
+
<tr>
|
| 1007 |
+
<td>Qwen 2 VL<br>(No Anchor)</td>
|
| 1008 |
+
<td>19.7</td>
|
| 1009 |
+
<td>31.7</td>
|
| 1010 |
+
<td>24.2</td>
|
| 1011 |
+
<td>17.1</td>
|
| 1012 |
+
<td>88.9</td>
|
| 1013 |
+
<td>8.3</td>
|
| 1014 |
+
<td>6.8</td>
|
| 1015 |
+
<td>55.5</td>
|
| 1016 |
+
<td>31.5 ± 0.9</td>
|
| 1017 |
+
</tr>
|
| 1018 |
+
<tr>
|
| 1019 |
+
<td>Qwen 2.5 VL<br>(No Anchor)</td>
|
| 1020 |
+
<td>63.1</td>
|
| 1021 |
+
<td>65.7</td>
|
| 1022 |
+
<td>67.3</td>
|
| 1023 |
+
<td>38.6</td>
|
| 1024 |
+
<td>73.6</td>
|
| 1025 |
+
<td>68.3</td>
|
| 1026 |
+
<td>49.1</td>
|
| 1027 |
+
<td>98.3</td>
|
| 1028 |
+
<td>65.5 ± 1.2</td>
|
| 1029 |
+
</tr>
|
| 1030 |
+
<tr>
|
| 1031 |
+
<td>olmOCR v0.1.75<br>(No Anchor)</td>
|
| 1032 |
+
<td>71.5</td>
|
| 1033 |
+
<td>71.4</td>
|
| 1034 |
+
<td>71.4</td>
|
| 1035 |
+
<td><strong>42.8</strong></td>
|
| 1036 |
+
<td>94.1</td>
|
| 1037 |
+
<td>77.7</td>
|
| 1038 |
+
<td>71.0</td>
|
| 1039 |
+
<td>97.8</td>
|
| 1040 |
+
<td>74.7 ± 1.1</td>
|
| 1041 |
+
</tr>
|
| 1042 |
+
<tr>
|
| 1043 |
+
<td>olmOCR v0.1.75<br>(Anchored)</td>
|
| 1044 |
+
<td>74.9</td>
|
| 1045 |
+
<td>71.2</td>
|
| 1046 |
+
<td>71.0</td>
|
| 1047 |
+
<td>42.2</td>
|
| 1048 |
+
<td>94.5</td>
|
| 1049 |
+
<td>78.3</td>
|
| 1050 |
+
<td>73.3</td>
|
| 1051 |
+
<td>98.3</td>
|
| 1052 |
+
<td>75.5 ± 1.0</td>
|
| 1053 |
+
</tr>
|
| 1054 |
+
<tr>
|
| 1055 |
+
<td>MonkeyOCR-pro-3B</td>
|
| 1056 |
+
<td><strong>83.8</strong></td>
|
| 1057 |
+
<td>68.8</td>
|
| 1058 |
+
<td>74.6</td>
|
| 1059 |
+
<td>36.1</td>
|
| 1060 |
+
<td>91.2</td>
|
| 1061 |
+
<td>76.6</td>
|
| 1062 |
+
<td>80.1</td>
|
| 1063 |
+
<td>95.3</td>
|
| 1064 |
+
<td>75.8 ± 1.0</td>
|
| 1065 |
+
</tr>
|
| 1066 |
+
<tr>
|
| 1067 |
+
<td><strong>dots.ocr</strong></td>
|
| 1068 |
+
<td>82.1</td>
|
| 1069 |
+
<td>64.2</td>
|
| 1070 |
+
<td><strong>88.3</strong></td>
|
| 1071 |
+
<td>40.9</td>
|
| 1072 |
+
<td>94.1</td>
|
| 1073 |
+
<td><strong>82.4</strong></td>
|
| 1074 |
+
<td>81.2</td>
|
| 1075 |
+
<td><strong>99.5</strong></td>
|
| 1076 |
+
<td><strong>79.1 ± 1.0</strong></td>
|
| 1077 |
+
</tr>
|
| 1078 |
+
</tbody>
|
| 1079 |
+
</table>
|
| 1080 |
+
|
| 1081 |
+
|
| 1082 |
+
> **Notes:**
> - The metrics are from [MonkeyOCR](https://github.com/Yuliang-Liu/MonkeyOCR), [olmocr](https://github.com/allenai/olmocr), and our own internal evaluations.
|
| 1085 |
+
> - We delete the Page-header and Page-footer cells in the result markdown.
|
| 1086 |
+
|
| 1087 |
+
|
| 1088 |
+
|
| 1089 |
+
# Quick Start
|
| 1090 |
+
## 1. Installation
|
| 1091 |
+
### Install dots.ocr
|
| 1092 |
+
```shell
|
| 1093 |
+
conda create -n dots_ocr python=3.12
|
| 1094 |
+
conda activate dots_ocr
|
| 1095 |
+
|
| 1096 |
+
git clone https://github.com/rednote-hilab/dots.ocr.git
|
| 1097 |
+
cd dots.ocr
|
| 1098 |
+
|
| 1099 |
+
# Install pytorch, see https://pytorch.org/get-started/previous-versions/ for your cuda version
|
| 1100 |
+
pip install torch==2.7.0 torchvision==0.22.0 torchaudio==2.7.0 --index-url https://download.pytorch.org/whl/cu128
|
| 1101 |
+
pip install -e .
|
| 1102 |
+
```
|
| 1103 |
+
|
| 1104 |
+
If you have trouble with the installation, try our [Docker Image](https://hub.docker.com/r/rednotehilab/dots.ocr) for an easier setup, and follow these steps:
|
| 1105 |
+
```shell
|
| 1106 |
+
git clone https://github.com/rednote-hilab/dots.ocr.git
|
| 1107 |
+
cd dots.ocr
|
| 1108 |
+
pip install -e .
|
| 1109 |
+
```
|
| 1110 |
+
|
| 1111 |
+
|
| 1112 |
+
### Download Model Weights
|
| 1113 |
+
> 💡**Note:** Please use a directory name without periods (e.g., `DotsOCR` instead of `dots.ocr`) for the model save path. This is a temporary workaround pending our integration with Transformers.
|
| 1114 |
+
```shell
|
| 1115 |
+
python3 tools/download_model.py
|
| 1116 |
+
```
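
If you prefer to pull the weights directly with `huggingface_hub`, a minimal sketch follows. The repo id `rednote-hilab/dots.ocr` is assumed from this model card; `tools/download_model.py` remains the supported path.

```python
# A minimal sketch, assuming the weights are hosted at rednote-hilab/dots.ocr;
# keep the local directory name free of periods (DotsOCR, not dots.ocr).
from huggingface_hub import snapshot_download

snapshot_download(
    repo_id="rednote-hilab/dots.ocr",
    local_dir="./weights/DotsOCR",
)
```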
|
| 1117 |
+
|
| 1118 |
+
|
| 1119 |
+
## 2. Deployment
|
| 1120 |
+
### vLLM inference
|
| 1121 |
+
We highly recommend using vLLM for deployment and inference. All of our evaluation results are based on vLLM version 0.9.1.
|
| 1122 |
+
The [Docker Image](https://hub.docker.com/r/rednotehilab/dots.ocr) is based on the official vLLM image. You can also follow the [Dockerfile](https://github.com/rednote-hilab/dots.ocr/blob/master/docker/Dockerfile) to build the deployment environment yourself.
|
| 1123 |
+
|
| 1124 |
+
```shell
|
| 1125 |
+
# You need to register the model with vLLM first
|
| 1126 |
+
python3 tools/download_model.py
|
| 1127 |
+
export hf_model_path=./weights/DotsOCR  # Path to your downloaded model weights. Use a directory name without periods (e.g., `DotsOCR` instead of `dots.ocr`); this is a temporary workaround pending our integration with Transformers.
|
| 1128 |
+
export PYTHONPATH=$(dirname "$hf_model_path"):$PYTHONPATH
|
| 1129 |
+
sed -i '/^from vllm\.entrypoints\.cli\.main import main$/a\
|
| 1130 |
+
from DotsOCR import modeling_dots_ocr_vllm' `which vllm` # If you downloaded the model weights yourself, replace `DotsOCR` with the directory name you saved them under, and remember to use a name without periods (e.g., `DotsOCR` instead of `dots.ocr`)
|
| 1131 |
+
|
| 1132 |
+
# launch vllm server
|
| 1133 |
+
CUDA_VISIBLE_DEVICES=0 vllm serve ${hf_model_path} --tensor-parallel-size 1 --gpu-memory-utilization 0.95 --chat-template-content-format string --served-model-name model --trust-remote-code
|
| 1134 |
+
|
| 1135 |
+
# If you get a ModuleNotFoundError: No module named 'DotsOCR', please check the note above on the saved model directory name.
|
| 1136 |
+
|
| 1137 |
+
# vllm api demo
|
| 1138 |
+
python3 ./demo/demo_vllm.py --prompt_mode prompt_layout_all_en
|
| 1139 |
+
```
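
The server exposes vLLM's standard OpenAI-compatible chat API, so you can also query it without the demo script. Below is a minimal sketch, assuming the launch command above (server on `localhost:8000`, `--served-model-name model`); the prompt text is illustrative, see `dots_ocr/utils/prompts.py` for the actual prompt modes.

```python
# A minimal sketch of querying the vLLM server via its OpenAI-compatible API.
# The image path and prompt text are illustrative.
import base64
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")

with open("demo/demo_image1.jpg", "rb") as f:
    image_b64 = base64.b64encode(f.read()).decode("utf-8")

response = client.chat.completions.create(
    model="model",  # must match --served-model-name
    messages=[{
        "role": "user",
        "content": [
            {"type": "image_url",
             "image_url": {"url": f"data:image/jpeg;base64,{image_b64}"}},
            {"type": "text", "text": "Extract the text content from this image."},
        ],
    }],
    temperature=0.0,
    max_tokens=4096,
)
print(response.choices[0].message.content)
```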
|
| 1140 |
+
|
| 1141 |
+
### Hugging Face inference
|
| 1142 |
+
```shell
|
| 1143 |
+
python3 demo/demo_hf.py
|
| 1144 |
+
```
|
| 1145 |
+
|
| 1146 |
+
<details>
|
| 1147 |
+
<summary><b>Hugging Face inference details</b></summary>
|
| 1148 |
+
|
| 1149 |
+
```python
|
| 1150 |
+
import torch
|
| 1151 |
+
from transformers import AutoModelForCausalLM, AutoProcessor, AutoTokenizer
|
| 1152 |
+
from qwen_vl_utils import process_vision_info
|
| 1153 |
+
from dots_ocr.utils import dict_promptmode_to_prompt
|
| 1154 |
+
|
| 1155 |
+
model_path = "./weights/DotsOCR"
|
| 1156 |
+
model = AutoModelForCausalLM.from_pretrained(
|
| 1157 |
+
model_path,
|
| 1158 |
+
attn_implementation="flash_attention_2",
|
| 1159 |
+
torch_dtype=torch.bfloat16,
|
| 1160 |
+
device_map="auto",
|
| 1161 |
+
trust_remote_code=True
|
| 1162 |
+
)
|
| 1163 |
+
processor = AutoProcessor.from_pretrained(model_path, trust_remote_code=True)
|
| 1164 |
+
|
| 1165 |
+
image_path = "demo/demo_image1.jpg"
|
| 1166 |
+
prompt = """Please output the layout information from the PDF image, including each layout element's bbox, its category, and the corresponding text content within the bbox.
|
| 1167 |
+
|
| 1168 |
+
1. Bbox format: [x1, y1, x2, y2]
|
| 1169 |
+
|
| 1170 |
+
2. Layout Categories: The possible categories are ['Caption', 'Footnote', 'Formula', 'List-item', 'Page-footer', 'Page-header', 'Picture', 'Section-header', 'Table', 'Text', 'Title'].
|
| 1171 |
+
|
| 1172 |
+
3. Text Extraction & Formatting Rules:
|
| 1173 |
+
- Picture: For the 'Picture' category, the text field should be omitted.
|
| 1174 |
+
- Formula: Format its text as LaTeX.
|
| 1175 |
+
- Table: Format its text as HTML.
|
| 1176 |
+
- All Others (Text, Title, etc.): Format their text as Markdown.
|
| 1177 |
+
|
| 1178 |
+
4. Constraints:
|
| 1179 |
+
- The output text must be the original text from the image, with no translation.
|
| 1180 |
+
- All layout elements must be sorted according to human reading order.
|
| 1181 |
+
|
| 1182 |
+
5. Final Output: The entire output must be a single JSON object.
|
| 1183 |
+
"""
|
| 1184 |
+
|
| 1185 |
+
messages = [
|
| 1186 |
+
{
|
| 1187 |
+
"role": "user",
|
| 1188 |
+
"content": [
|
| 1189 |
+
{
|
| 1190 |
+
"type": "image",
|
| 1191 |
+
"image": image_path
|
| 1192 |
+
},
|
| 1193 |
+
{"type": "text", "text": prompt}
|
| 1194 |
+
]
|
| 1195 |
+
}
|
| 1196 |
+
]
|
| 1197 |
+
|
| 1198 |
+
# Preparation for inference
|
| 1199 |
+
text = processor.apply_chat_template(
|
| 1200 |
+
messages,
|
| 1201 |
+
tokenize=False,
|
| 1202 |
+
add_generation_prompt=True
|
| 1203 |
+
)
|
| 1204 |
+
image_inputs, video_inputs = process_vision_info(messages)
|
| 1205 |
+
inputs = processor(
|
| 1206 |
+
text=[text],
|
| 1207 |
+
images=image_inputs,
|
| 1208 |
+
videos=video_inputs,
|
| 1209 |
+
padding=True,
|
| 1210 |
+
return_tensors="pt",
|
| 1211 |
+
)
|
| 1212 |
+
|
| 1213 |
+
inputs = inputs.to("cuda")
|
| 1214 |
+
|
| 1215 |
+
# Inference: Generation of the output
|
| 1216 |
+
generated_ids = model.generate(**inputs, max_new_tokens=24000)
|
| 1217 |
+
generated_ids_trimmed = [
|
| 1218 |
+
out_ids[len(in_ids) :] for in_ids, out_ids in zip(inputs.input_ids, generated_ids)
|
| 1219 |
+
]
|
| 1220 |
+
output_text = processor.batch_decode(
|
| 1221 |
+
generated_ids_trimmed, skip_special_tokens=True, clean_up_tokenization_spaces=False
|
| 1222 |
+
)
|
| 1223 |
+
print(output_text)
|
| 1224 |
+
|
| 1225 |
+
```
|
| 1226 |
+
|
| 1227 |
+
</details>
|
| 1228 |
+
|
| 1229 |
+
## 3. Document Parse
|
| 1230 |
+
**Based on the vLLM server**, you can parse an image or a PDF file using the following commands:
|
| 1231 |
+
```bash
|
| 1232 |
+
|
| 1233 |
+
# Parse all layout info, both detection and recognition
|
| 1234 |
+
# Parse a single image
|
| 1235 |
+
python3 dots_ocr/parser.py demo/demo_image1.jpg
|
| 1236 |
+
# Parse a single PDF
|
| 1237 |
+
python3 dots_ocr/parser.py demo/demo_pdf1.pdf --num_threads 64  # try a larger num_threads for PDFs with many pages
|
| 1238 |
+
|
| 1239 |
+
# Layout detection only
|
| 1240 |
+
python3 dots_ocr/parser.py demo/demo_image1.jpg --prompt prompt_layout_only_en
|
| 1241 |
+
|
| 1242 |
+
# Parse text only, except Page-header and Page-footer
|
| 1243 |
+
python3 dots_ocr/parser.py demo/demo_image1.jpg --prompt prompt_ocr
|
| 1244 |
+
|
| 1245 |
+
# Parse layout info by bbox
|
| 1246 |
+
python3 dots_ocr/parser.py demo/demo_image1.jpg --prompt prompt_grounding_ocr --bbox 163 241 1536 705
|
| 1247 |
+
|
| 1248 |
+
```
|
| 1249 |
+
|
| 1250 |
+
<details>
|
| 1251 |
+
<summary><b>Output Results</b></summary>
|
| 1252 |
+
|
| 1253 |
+
1. **Structured Layout Data** (`demo_image1.json`): A JSON file containing the detected layout elements, including their bounding boxes, categories, and extracted text.
|
| 1254 |
+
2. **Processed Markdown File** (`demo_image1.md`): A Markdown file generated from the concatenated text of all detected cells.
|
| 1255 |
+
* An additional version, `demo_image1_nohf.md`, is also provided, which excludes page headers and footers for compatibility with benchmarks like OmniDocBench and olmOCR-bench.
|
| 1256 |
+
3. **Layout Visualization** (`demo_image1.jpg`): The original image with the detected layout bounding boxes drawn on it.
|
| 1257 |
+
|
| 1258 |
+
</details>
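
To illustrate how the `_nohf` markdown relates to the structured JSON, here is a minimal post-processing sketch. It assumes each cell carries `category` and `text` keys, as the layout prompt specifies, and that cells are already sorted in reading order; the file paths are illustrative.

```python
# A minimal sketch: rebuild a header/footer-free markdown file from the
# structured layout JSON. Paths and field names are assumptions based on
# the layout prompt, not the parser's exact output contract.
import json

SKIP = {"Page-header", "Page-footer"}

with open("demo_image1.json", encoding="utf-8") as f:
    cells = json.load(f)

blocks = [c["text"] for c in cells
          if c.get("category") not in SKIP and c.get("text")]

with open("demo_image1_nohf.md", "w", encoding="utf-8") as f:
    f.write("\n\n".join(blocks))
```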
|
| 1259 |
+
|
| 1260 |
+
## 4. Demo
|
| 1261 |
+
You can run the demo with the following command, or try it directly at the [live demo](https://dotsocr.xiaohongshu.com/):
|
| 1262 |
+
```bash
|
| 1263 |
+
python demo/demo_gradio.py
|
| 1264 |
+
```
|
| 1265 |
+
|
| 1266 |
+
We also provide a demo for grounding OCR:
|
| 1267 |
+
```bash
|
| 1268 |
+
python demo/demo_gradio_annotion.py
|
| 1269 |
+
```
|
| 1270 |
+
|
| 1271 |
+
|
| 1272 |
+
### Example for formula document
|
| 1273 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/formula1.png" alt="formula1.png" border="0" />
|
| 1274 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/formula2.png" alt="formula2.png" border="0" />
|
| 1275 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/formula3.png" alt="formula3.png" border="0" />
|
| 1276 |
+
|
| 1277 |
+
### Example for table document
|
| 1278 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/table1.png" alt="table1.png" border="0" />
|
| 1279 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/table2.png" alt="table2.png" border="0" />
|
| 1280 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/table3.png" alt="table3.png" border="0" />
|
| 1281 |
+
|
| 1282 |
+
### Example for multilingual document
|
| 1283 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/Tibetan.png" alt="Tibetan.png" border="0" />
|
| 1284 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/tradition_zh.png" alt="tradition_zh.png" border="0" />
|
| 1285 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/nl.png" alt="nl.png" border="0" />
|
| 1286 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/kannada.png" alt="kannada.png" border="0" />
|
| 1287 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/russian.png" alt="russian.png" border="0" />
|
| 1288 |
+
|
| 1289 |
+
### Example for reading order
|
| 1290 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/reading_order.png" alt="reading_order.png" border="0" />
|
| 1291 |
+
|
| 1292 |
+
### Example for grounding ocr
|
| 1293 |
+
<img src="https://raw.githubusercontent.com/rednote-hilab/dots.ocr/master/assets/showcase/grounding.png" alt="grounding.png" border="0" />
|
| 1294 |
+
|
| 1295 |
+
|
| 1296 |
+
## Acknowledgments
|
| 1297 |
+
We would like to thank [Qwen2.5-VL](https://github.com/QwenLM/Qwen2.5-VL), [aimv2](https://github.com/apple/ml-aim), [MonkeyOCR](https://github.com/Yuliang-Liu/MonkeyOCR),
|
| 1298 |
+
[OmniDocBench](https://github.com/opendatalab/OmniDocBench), and [PyMuPDF](https://github.com/pymupdf/PyMuPDF) for providing code and models.
|
| 1299 |
+
|
| 1300 |
+
We also thank [DocLayNet](https://github.com/DS4SD/DocLayNet), [M6Doc](https://github.com/HCIILAB/M6Doc), [CDLA](https://github.com/buptlihang/CDLA), and [D4LA](https://github.com/AlibabaResearch/AdvancedLiterateMachinery) for providing valuable datasets.
|
| 1301 |
+
|
| 1302 |
+
## Limitation & Future Work
|
| 1303 |
+
|
| 1304 |
+
- **Complex Document Elements:**
|
| 1305 |
+
- **Table & Formula**: dots.ocr is not yet perfect at parsing high-complexity tables and formulas.
|
| 1306 |
+
- **Picture**: Pictures in documents are currently not parsed.
|
| 1307 |
+
|
| 1308 |
+
- **Parsing Failures:** The model may fail to parse under certain conditions:
|
| 1309 |
+
- When the character-to-pixel ratio is excessively high: try enlarging the image or increasing the PDF parsing DPI (a setting of 200 is recommended; a rasterization sketch follows this list). However, please note that the model performs optimally on images with a resolution under 11289600 pixels.
|
| 1310 |
+
- Continuous special characters, such as ellipses (`...`) and underscores (`_`), may cause the prediction output to repeat endlessly. In such scenarios, consider using alternative prompts like `prompt_layout_only_en`, `prompt_ocr`, or `prompt_grounding_ocr` ([details here](https://github.com/rednote-hilab/dots.ocr/blob/master/dots_ocr/utils/prompts.py)).
|
| 1311 |
+
|
| 1312 |
+
- **Performance Bottleneck:** Although **dots.ocr** is built on a compact 1.7B-parameter LLM, it is not yet optimized for high-throughput processing of large PDF volumes.
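
For the high character-to-pixel-ratio case, here is a minimal sketch of rasterizing PDF pages at 200 DPI with PyMuPDF before parsing. The paths are illustrative, and the parser's own DPI setting (per the note above) is the recommended route; this is only for custom pipelines.

```python
# A minimal sketch: render each PDF page at 200 DPI so small text gets
# enough pixels. Paths are illustrative.
import fitz  # PyMuPDF

doc = fitz.open("demo/demo_pdf1.pdf")
for i, page in enumerate(doc):
    pix = page.get_pixmap(dpi=200)  # PyMuPDF's default render is 72 DPI
    pix.save(f"page_{i:03d}.png")
doc.close()
```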
|
| 1313 |
+
|
| 1314 |
+
We are committed to achieving more accurate table and formula parsing, as well as enhancing the model's OCR capabilities for broader generalization, all while aiming for **a more powerful, more efficient model**. Furthermore, we are actively considering the development of **a more general-purpose perception model** based on Vision-Language Models (VLMs), which would integrate general detection, image captioning, and OCR tasks into a unified framework. **Parsing the content of the pictures in the documents** is also a key priority for our future work.
|
| 1315 |
+
We believe that collaboration is the key to tackling these exciting challenges. If you are passionate about advancing the frontiers of document intelligence and are interested in contributing to these future endeavors, we would love to hear from you. Please reach out to us via email at: [yanqing4@xiaohongshu.com].
|
chat_template.json
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{%- for m in messages %}{%- if m.role == 'system' %}{{- '<|system|>' + m.content + '<|endofsystem|>\n' }}{%- elif m.role == 'user' %}{% if m.content is string %}{{- '<|user|>' + m.content + '<|endofuser|>' }}{% else %} {% for content in m.content %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|img|><|imgpad|><|endofimg|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|img|><|video_pad|><|endofimg|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}{%- endif %}{%- elif m.role == 'assistant' %}{{- '<|assistant|>' + m.content }}{%- if not loop.last %}{{- '<|endofassistant|>' }}{%- endif %}{%- endif %}{%- endfor %}{%- if messages[-1].role != 'assistant' %}{{- '<|assistant|>' }}{%- endif %}"
|
| 3 |
+
}
|
config.json
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architectures": [
|
| 3 |
+
"DotsOCRForCausalLM"
|
| 4 |
+
],
|
| 5 |
+
"model_type": "dots_ocr",
|
| 6 |
+
"auto_map": {
|
| 7 |
+
"AutoConfig": "configuration_dots.DotsOCRConfig",
|
| 8 |
+
"AutoModelForCausalLM": "modeling_dots_ocr.DotsOCRForCausalLM"
|
| 9 |
+
},
|
| 10 |
+
"attention_bias": true,
|
| 11 |
+
"attention_dropout": 0.0,
|
| 12 |
+
"hidden_act": "silu",
|
| 13 |
+
"hidden_size": 1536,
|
| 14 |
+
"initializer_range": 0.02,
|
| 15 |
+
"intermediate_size": 8960,
|
| 16 |
+
"max_position_embeddings": 131072,
|
| 17 |
+
"max_window_layers": 28,
|
| 18 |
+
"num_attention_heads": 12,
|
| 19 |
+
"num_hidden_layers": 28,
|
| 20 |
+
"num_key_value_heads": 2,
|
| 21 |
+
"rms_norm_eps": 1e-06,
|
| 22 |
+
"rope_scaling": null,
|
| 23 |
+
"rope_theta": 1000000,
|
| 24 |
+
"sliding_window": 131072,
|
| 25 |
+
"tie_word_embeddings": false,
|
| 26 |
+
"torch_dtype": "bfloat16",
|
| 27 |
+
"transformers_version": "4.51.0",
|
| 28 |
+
"use_cache": true,
|
| 29 |
+
"use_sliding_window": false,
|
| 30 |
+
"vocab_size": 151936,
|
| 31 |
+
"image_token_id": 151665,
|
| 32 |
+
"video_token_id": 151656,
|
| 33 |
+
"vision_config": {
|
| 34 |
+
"embed_dim": 1536,
|
| 35 |
+
"hidden_size": 1536,
|
| 36 |
+
"intermediate_size": 4224,
|
| 37 |
+
"num_hidden_layers": 42,
|
| 38 |
+
"num_attention_heads": 12,
|
| 39 |
+
"num_channels": 3,
|
| 40 |
+
"patch_size": 14,
|
| 41 |
+
"post_norm": true,
|
| 42 |
+
"rms_norm_eps": 1e-05,
|
| 43 |
+
"spatial_merge_size": 2,
|
| 44 |
+
"temporal_patch_size": 1,
|
| 45 |
+
"use_bias": false,
|
| 46 |
+
"attn_implementation": "flash_attention_2",
|
| 47 |
+
"init_merger_std": 0.02,
|
| 48 |
+
"initializer_range": 0.02,
|
| 49 |
+
"is_causal": false
|
| 50 |
+
}
|
| 51 |
+
}
|
configuration_dots.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, Optional
|
| 2 |
+
from transformers.configuration_utils import PretrainedConfig
|
| 3 |
+
from transformers.models.qwen2 import Qwen2Config
|
| 4 |
+
from transformers import Qwen2_5_VLProcessor, AutoProcessor
|
| 5 |
+
from transformers.models.auto.configuration_auto import CONFIG_MAPPING
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class DotsVisionConfig(PretrainedConfig):
|
| 9 |
+
model_type: str = "dots_vit"
|
| 10 |
+
|
| 11 |
+
def __init__(
|
| 12 |
+
self,
|
| 13 |
+
embed_dim: int = 1536, # vision encoder embed size
|
| 14 |
+
hidden_size: int = 1536, # after merger hidden size
|
| 15 |
+
intermediate_size: int = 4224,
|
| 16 |
+
num_hidden_layers: int = 42,
|
| 17 |
+
num_attention_heads: int = 12,
|
| 18 |
+
num_channels: int = 3,
|
| 19 |
+
patch_size: int = 14,
|
| 20 |
+
spatial_merge_size: int = 2,
|
| 21 |
+
temporal_patch_size: int = 1,
|
| 22 |
+
rms_norm_eps: float = 1e-5,
|
| 23 |
+
use_bias: bool = False,
|
| 24 |
+
attn_implementation="flash_attention_2", # "eager","sdpa","flash_attention_2"
|
| 25 |
+
initializer_range=0.02,
|
| 26 |
+
init_merger_std=0.02,
|
| 27 |
+
is_causal=False, # ve causal forward
|
| 28 |
+
post_norm=True,
|
| 29 |
+
gradient_checkpointing=False,
|
| 30 |
+
**kwargs: Any,
|
| 31 |
+
):
|
| 32 |
+
super().__init__(**kwargs)
|
| 33 |
+
self.embed_dim = embed_dim
|
| 34 |
+
self.hidden_size = hidden_size
|
| 35 |
+
self.intermediate_size = intermediate_size
|
| 36 |
+
self.num_hidden_layers = num_hidden_layers
|
| 37 |
+
self.num_attention_heads = num_attention_heads
|
| 38 |
+
self.num_channels = num_channels
|
| 39 |
+
self.patch_size = patch_size
|
| 40 |
+
self.spatial_merge_size = spatial_merge_size
|
| 41 |
+
self.temporal_patch_size = temporal_patch_size
|
| 42 |
+
self.rms_norm_eps = rms_norm_eps
|
| 43 |
+
self.use_bias = use_bias
|
| 44 |
+
self.attn_implementation = attn_implementation
|
| 45 |
+
self.initializer_range = initializer_range
|
| 46 |
+
self.init_merger_std = init_merger_std
|
| 47 |
+
self.is_causal = is_causal
|
| 48 |
+
self.post_norm = post_norm
|
| 49 |
+
self.gradient_checkpointing = gradient_checkpointing
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class DotsOCRConfig(Qwen2Config):
|
| 54 |
+
model_type = "dots_ocr"
|
| 55 |
+
def __init__(self,
|
| 56 |
+
image_token_id = 151665,
|
| 57 |
+
video_token_id = 151656,
|
| 58 |
+
vision_config: Optional[dict] = None, *args, **kwargs):
|
| 59 |
+
super().__init__(*args, **kwargs)
|
| 60 |
+
self.image_token_id = image_token_id
|
| 61 |
+
self.video_token_id = video_token_id
|
| 62 |
+
self.vision_config = DotsVisionConfig(**(vision_config or {}))
|
| 63 |
+
|
| 64 |
+
def save_pretrained(self, save_directory, **kwargs):
|
| 65 |
+
self._auto_class = None
|
| 66 |
+
super().save_pretrained(save_directory, **kwargs)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class DotsVLProcessor(Qwen2_5_VLProcessor):
|
| 70 |
+
def __init__(self, image_processor=None, tokenizer=None, chat_template=None, **kwargs):
|
| 71 |
+
super().__init__(image_processor, tokenizer, chat_template=chat_template)
|
| 72 |
+
self.image_token = "<|imgpad|>" if not hasattr(tokenizer, "image_token") else tokenizer.image_token
|
| 73 |
+
self.image_token_id = 151665 if not hasattr(tokenizer, "image_token_id") else tokenizer.image_token_id
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
AutoProcessor.register("dots_ocr", DotsVLProcessor)
|
| 77 |
+
CONFIG_MAPPING.register("dots_ocr", DotsOCRConfig)
|
dots.ocr LICENSE AGREEMENT
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
dots.ocr LICENSE AGREEMENT
|
| 2 |
+
|
| 3 |
+
Effective Date: [August 8, 2025]
|
| 4 |
+
|
| 5 |
+
Copyright Holder: [Xingyin Information Technology (Shanghai) Co., Ltd]
|
| 6 |
+
|
| 7 |
+
This License Agreement (“Agreement”) governs Your use, reproduction, modification, and distribution of dots.ocr (the "Model Materials"). This Agreement is designed to maximize the openness and use of the Model Materials while addressing the unique legal, ethical, and technical challenges posed by large language models.
|
| 8 |
+
|
| 9 |
+
WHEREAS, Licensor has developed the dots.ocr document parsing model and intends to distribute the Model Materials under an open‑source framework;
|
| 10 |
+
WHEREAS, traditional open-source licenses (e.g., the MIT License) may not fully address the inherent complexities of document parsing models, namely their multiple components (code, weights, training data), potential ethical risks, data‑governance issues, and intellectual‑property and liability questions regarding AI‑generated content;
|
| 11 |
+
WHEREAS, Licensor seeks to provide a legal framework that ensures maximum access to and use of the Model Materials while clearly defining the rights, obligations, and liabilities of Licensee;
|
| 12 |
+
|
| 13 |
+
THEREFORE, the parties agree that, subject to the MIT License, they shall be bound by the following terms and conditions:
|
| 14 |
+
|
| 15 |
+
1. Definitions and Interpretation
|
| 16 |
+
Purpose: To define key terms used in this Agreement, particularly "Model Materials," ensuring clarity of the license scope beyond traditional software code. To clarify the order of precedence between this Agreement and the MIT License to avoid conflict.
|
| 17 |
+
|
| 18 |
+
1.1 “Licensor” shall mean the entity providing the Model Materials under this Agreement, namely [Xingyin Information Technology (Shanghai) Co., Ltd].
|
| 19 |
+
|
| 20 |
+
1.2 “Licensee” or "You" shall mean any individual or entity exercising permissions granted by this Agreement.
|
| 21 |
+
|
| 22 |
+
1.3 “Model Materials” shall mean all materials provided by Licensor under this Agreement, including but not limited to:
|
| 23 |
+
(a) one or more machine‑learning models, including architecture and trained parameters (i.e., model weights);
|
| 24 |
+
(b) all associated preprocessing, training, inference, and fine‑tuning code;
|
| 25 |
+
(c) training datasets and evaluation scripts (or their detailed descriptions and access mechanisms); and
|
| 26 |
+
(d) any accompanying documentation, metadata, and tools.
|
| 27 |
+
The above Model Materials shall be subject to the content published on the Licensor’s website or GitHub repository at https://github.com/rednote-hilab/dots.ocr.
|
| 28 |
+
|
| 29 |
+
1.4 “Outputs” shall mean any content generated through the use of the Model Materials, such as text, tables, code, layout information, and formulas extracted from documents.
|
| 30 |
+
|
| 31 |
+
1.5 “MIT License” shall mean The MIT Open Source License published by the Massachusetts Institute of Technology.
|
| 32 |
+
|
| 33 |
+
1.6 Priority of Agreement. In the event of any conflict or inconsistency between this Agreement and the MIT License, the terms of the MIT License shall prevail. However, if the terms of the MIT License are ambiguous or silent on a particular matter, the provisions of this Agreement shall apply and supplement the MIT License.
|
| 34 |
+
|
| 35 |
+
2. Grant of Rights and Scope of Use
|
| 36 |
+
|
| 37 |
+
Purpose: To grant broad, permissive rights to the Licensee for the Model Materials—including code, weights, data, and documentation—to ensure maximum openness and flexibility while clarifying the free use of model-generated content. Additionally, it clarifies the feasibility of transitioning from open-source to commercial‑use and the use of OpenAPI interfaces.
|
| 38 |
+
|
| 39 |
+
2.1 Grant of Copyright License. Subject to Licensee's compliance with this Agreement, Licensor hereby grants Licensee a perpetual, worldwide, non‑exclusive, no-charge, royalty‑free copyright license to use (run or test), reproduce, modify, create derivative works of, merge, publish, distribute the Model Materials; sublicense and/or sell copies of the Model Materials or any derivative works thereof; and incorporate the unmodified or modified Model Materials into proprietary products or services, including for commercial purposes, software‑as‑a‑service (SaaS) offerings, or via OpenAPI or other interfaces.
|
| 40 |
+
|
| 41 |
+
2.2 Fundamental Capabilities. The Model Materials provide only the model’s fundamental capabilities. Licensees may develop derivative AI applications or undertake task‑specific training thereon.
|
| 42 |
+
|
| 43 |
+
2.3 From Open Source to Commercial Use. The open-source release does not preclude Licensor’s commercial exploitation of the Model Materials, in whole or in part. Any such commercial use shall, at that time, be subject to license agreements between Licensor and applicable users.
|
| 44 |
+
|
| 45 |
+
2.4 API‑Service Exception. Licensees who access the Model Materials through API calls or provide model services via API interfaces (without directly distributing model weights) shall not be subject to this Agreement unless otherwise expressly agreed. Instead, such use shall be governed by the API terms of use published by Licensor (if any).
|
| 46 |
+
|
| 47 |
+
3. Acceptable Use Policy and Prohibited Uses
|
| 48 |
+
|
| 49 |
+
3.1 Responsible Use. Licensee must use the Model Materials in a responsible, ethical, and lawful manner, in compliance with all applicable laws, regulations, industry standards, and best practices.
|
| 50 |
+
|
| 51 |
+
3.2 Enterprise On‑Premises Deployment. The Licensee may deploy the Model Materials in closed‑source, on‑premises enterprise environments.
|
| 52 |
+
|
| 53 |
+
3.3 Prohibited Uses. Any breach of the prohibitions below will result in the automatic termination of all licenses granted under this Agreement. Licensee agrees not to use the Model Materials or any derivative works thereof, in connection with:
|
| 54 |
+
(a) Identification and Utilization of Illegal/Harmful Content: Includes identifying graphic/text materials used for counterfeiting certificates/invoices, perpetrating fraud, or launching cyberattacks; or processing images containing illegal content such as violence, criminal activities, disinformation, or child exploitation.
|
| 55 |
+
(b) Privacy Infringement and Discriminatory Practices: Extracting personal sensitive information (e.g., ID numbers, medical records, biometric data) or protected characteristics (e.g., race, gender) from images without legal authorization or consent, for purposes of privacy violation, automated discriminatory decision-making, or harassment.
|
| 56 |
+
(c) Copyright Restrictions: Licensees shall not use the tool for unauthorized digitization of publications/document scanning or bulk scraping of content. Any use involving publications or other copyright-protected materials must first obtain relevant permissions.
|
| 57 |
+
|
| 58 |
+
4. Intellectual Property Ownership and Contributions
|
| 59 |
+
|
| 60 |
+
4.1 Licensor's Copyright Reservation. Licensor reserves all right, title, and interest in and to the Model Materials (including the model architecture, parameters, code, and original training data), except as expressly licensed herein. The original copyright of the Model Materials belongs to the Licensor.
|
| 61 |
+
|
| 62 |
+
4.2 Patent License. Subject to the terms and conditions of this Agreement, Licensor hereby grants Licensee a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Model Materials, where such license applies only to those patent claims licensable by the Licensor that are necessarily infringed by its contribution(s).
|
| 63 |
+
If Licensee institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Model Materials constitute direct or contributory patent infringement, then any patent licenses granted under this License for the Model Materials shall terminate as of the date such litigation is asserted or filed.
|
| 64 |
+
|
| 65 |
+
4.3 Outputs: The Outputs generated through the use of the Model Materials generally refer to text, tables, layouts, and other content extracted from documents or images. The extracted content itself does not generate new intellectual property rights, and all intellectual property remains with the original authors or copyright holders. The Licensee is responsible for due diligence regarding the legality of the Outputs, particularly where the content extracted by the OCR model may be substantially similar to existing copyrighted works, which could present intellectual property infringement risks. The Licensor assumes no liability for such infringements.
|
| 66 |
+
4.4 Trademarks. Nothing in this License permits Licensee to make use of Licensor’s trademarks, trade names, logos (e.g., “rednote,” “Xiaohongshu,” “dots.ocr”) or to otherwise suggest endorsement or misrepresent the relationship between the parties, unless Licensor’s prior written approval is granted.
|
| 67 |
+
|
| 68 |
+
5. Data Governance, Privacy, and Security
|
| 69 |
+
|
| 70 |
+
5.1 Data Quality and Bias. Licensee shall use training data from lawful sources and is encouraged to conduct due diligence before deploying the Model Materials and to take reasonable steps to mitigate any known biases in its training data or applications.
|
| 71 |
+
|
| 72 |
+
5.2 Privacy Protection.
|
| 73 |
+
(a) Sensitive‑Data Restrictions. It is prohibited to use the Model Materials to process, extract, or infer sensitive personal data protected under specific laws (such as GDPR or HIPAA), particularly when dealing with documents containing personally identifiable information (such as ID numbers, health data, financial information, etc.), unless Licensee has obtained all necessary consents, lawful basis, or authorizations, and has implemented adequate anonymization, pseudonymization, or other privacy-enhancing technologies.
|
| 74 |
+
(b) Data Minimization and Purpose Limitation. The Licensee shall follow the principle of data minimization when using the OCR Model, processing only the user data necessary for specific, explicit, and lawful purposes. Specifically, the OCR Model should avoid processing unnecessary sensitive data and ensure compliance with applicable privacy protection laws during data handling.
|
| 75 |
+
(c) Transparency. Licensee shall provide clear and transparent privacy policies and terms of use when processing user data, particularly during document scanning and information extraction.
|
| 76 |
+
|
| 77 |
+
5.3 Security Measures. Licensee shall implement appropriate technical and administrative safeguards to protect the Model Materials and any associated data against unauthorized access, disclosure, alteration, or destruction. Such measures may include, but are not limited to, encryption, access controls, logging, and audit trails.
|
| 78 |
+
|
| 79 |
+
5.4 Further Training. Licensee may only use user‑provided input or Outputs for training, fine-tuning, or improving other AI models if it has obtained the specific and informed consent of data subjects.
|
| 80 |
+
|
| 81 |
+
6. Disclaimer of Warranty and Limitation of Liability
|
| 82 |
+
|
| 83 |
+
6.1 “AS IS” Basis. Unless required by applicable law, the Model Materials are provided on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE. Licensee is solely responsible for determining the appropriateness of using or redistributing the Model Materials and assumes any risks associated with the exercise of permissions under this License. Licensor does not provide any warranty of non-infringement but represents that no infringing code has been knowingly included.
|
| 84 |
+
|
| 85 |
+
6.2 Outputs Disclaimer. As a neutral technology, Licensor disclaims all liability for the accuracy, completeness, reliability, safety, legality, or suitability of any Outputs. The Licensee is solely responsible for verifying the accuracy and appropriateness of AI-generated content and shall provide appropriate disclosures when publishing or relying upon such content.
|
| 86 |
+
|
| 87 |
+
6.3 Limitation of Liability and Recourse. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, shall Licensor or contributors be liable for any claims or damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Model Materials (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if Licensor has been advised of the possibility of such damages. If such losses are incurred, recourse may be sought against the Licensee responsible for causing the loss.
6.4 Content‑Filtering Disclaimer. Although the Model Materials may include content‑filtering mechanisms, Licensor makes no warranties of any kind regarding the stability, quality, accuracy, completeness, or any specific outcome of Outputs. Licensee is solely responsible for reviewing, verifying, and performing quality control on Outputs and assumes all associated risks and liabilities.
7. Attribution and License Reservation
7.1 License. When distributing or redistributing the Model Materials, Licensee must give any other recipients of the Model Materials a copy of this Agreement.
7.2 Copyright and Notices. When distributing any part of the Model Materials, Licensee must retain all copyright, patent, trademark, and attribution notices included in the Model Materials.
7.3 Attribution. Licensee is encouraged to prominently display the name of Licensor and the Model Materials in any public statements, products, or services that contain the Model Materials (or any derivative works thereof), to promote transparency and community trust. If Licensee distributes modified weights or fine‑tuned models based on the Model Materials, Licensee must prominently display the following statement in the related website or documentation: “Built with dots.ocr.”
8. Governing Law and Dispute Resolution
8.1 Governing Law. This Agreement shall be governed by and construed in accordance with the laws of the People’s Republic of China, without regard to its conflict of laws principles.
8.2 Dispute Resolution. Any dispute, claim, or disagreement arising out of or relating to this Agreement shall first be resolved through amicable consultation. If such consultation fails, the dispute shall be submitted to the Hangzhou Arbitration Commission for arbitration. The arbitration shall be conducted in accordance with the laws of China, and the place of arbitration shall be [Hangzhou, China]. The arbitral award shall be final and binding upon both parties.
9. Regulatory Compliance Amendments
In the event that any part of this Agreement becomes invalid or requires adjustment due to changes in applicable laws or regulations, Licensor reserves the right to issue a revised version of this Agreement. Licensee shall migrate to the new version within [e.g., ninety (90)] days of its release; otherwise, all rights granted under this Agreement shall automatically terminate.
10. Security Reporting
A Licensee who discovers any security vulnerability in the Model Materials may report it to Licensor at dots-feedback@xiaohongshu.com. Licensee shall not disclose vulnerability details until Licensor issues an official remediation, unless otherwise required by law.
generation_config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "max_length": 32768,
+  "eos_token_id": [
+    151643,
+    151673
+  ]
+}
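Since this file sets the model's decoding defaults, here is a minimal sketch (an illustration, not part of the upload) of how these values surface at load time with the transformers library, assuming the repository has been downloaded to a hypothetical local path ./dots.ocr:

# Sketch: inspect the decoding defaults defined in generation_config.json.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("./dots.ocr")  # hypothetical local path
print(gen_cfg.max_length)    # 32768
print(gen_cfg.eos_token_id)  # [151643, 151673]: generation stops at either token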
merges.txt
ADDED
The diff for this file is too large to render.
model-00001-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1672627ab723b919f91fd7746a98b468d5ea574dd321951dffe9a77ceeeacbf9
+size 4292758192
model-00002-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e445fd457fbaf392e68153fcbab0146af876c8498b0c763630d224647b0d5b5
+size 1785673544
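The two entries above are git-lfs pointer files: the multi-gigabyte weights are stored out of band, and each pointer records only the sha256 oid and byte size of its shard. A minimal sketch (using only the Python standard library) of verifying a downloaded shard against its pointer:

# Sketch: check a downloaded shard against the oid/size in its LFS pointer.
import hashlib, os

def verify_shard(path, expected_oid, expected_size):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        # Stream in 1 MiB chunks so multi-GB shards never sit in memory at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid and os.path.getsize(path) == expected_size

ok = verify_shard("model-00002-of-00002.safetensors",
                  "5e445fd457fbaf392e68153fcbab0146af876c8498b0c763630d224647b0d5b5",
                  1785673544)
print(ok)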
model.safetensors.index.json
ADDED
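This index tells a loader which of the two shards holds each tensor: the weight_map in the listing below maps every parameter name to its shard file, and metadata.total_size records the combined byte count. A minimal sketch (assuming the safetensors package, torch for framework="pt", and the shard files sitting alongside the index) of resolving one tensor through it:

# Sketch: look up a tensor in the weight_map and read it from its shard.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.embed_tokens.weight"
shard = index["weight_map"][name]  # -> "model-00001-of-00002.safetensors"
with safe_open(shard, framework="pt") as reader:
    tensor = reader.get_tensor(name)
print(tensor.shape)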
@@ -0,0 +1,650 @@
+{
+  "metadata": {
+    "total_size": 6078358528
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00001-of-00002.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.24.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.24.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.24.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.27.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.27.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.27.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.norm.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.0.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.0.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.0.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.0.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.0.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.0.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.0.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.1.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.1.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.1.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.1.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.1.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.1.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.1.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.10.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.10.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.10.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.10.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.10.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.10.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.10.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.11.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.11.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.11.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.11.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.11.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.11.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.11.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.12.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.12.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.12.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.12.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.12.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.12.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.12.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.13.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.13.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.13.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.13.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.13.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.13.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.13.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.14.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.14.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.14.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.14.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.14.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.14.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.14.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.15.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.15.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.15.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.15.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.15.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.15.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.15.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.16.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.16.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.16.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.16.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.16.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.16.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.16.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.17.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.17.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.17.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.17.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.17.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.17.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.17.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.18.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.18.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.18.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.18.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.18.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.18.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.18.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.19.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.19.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.19.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.19.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.19.mlp.fc3.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.19.norm1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.19.norm2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.2.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.2.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.2.mlp.fc1.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.2.mlp.fc2.weight": "model-00001-of-00002.safetensors",
+    "vision_tower.blocks.2.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.2.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.2.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.20.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.20.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.20.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.20.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.20.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.20.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.20.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.21.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.21.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.21.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.21.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.21.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.21.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.21.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.22.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.22.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.22.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.22.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.22.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.22.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.22.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.23.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.23.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.23.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.23.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.23.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.23.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.23.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.24.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.24.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.24.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.24.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.24.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.24.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.24.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.25.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.25.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.25.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.25.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.25.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.25.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.25.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.26.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.26.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.26.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.26.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.26.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.26.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.26.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.27.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.27.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.27.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.27.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.27.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.27.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.27.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.28.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.28.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.28.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.28.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.28.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.28.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.28.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.29.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.29.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.29.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.29.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.29.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.29.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.29.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.3.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.3.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.3.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.3.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.3.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.3.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.3.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.30.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.30.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.30.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.30.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.30.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.30.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.30.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.31.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.31.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.31.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.31.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.31.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.31.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.31.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.32.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.32.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.32.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.32.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.32.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.32.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.32.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.33.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.33.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.33.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.33.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.33.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.33.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.33.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.34.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.34.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.34.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.34.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.34.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.34.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.34.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.35.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.35.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.35.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.35.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.35.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.35.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.35.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.36.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.36.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.36.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.36.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.36.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.36.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.36.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.37.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.37.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.37.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.37.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.37.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.37.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.37.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.38.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.38.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.38.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.38.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.38.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.38.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.38.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.39.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.39.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.39.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.39.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.39.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.39.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.39.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.4.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.4.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.4.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.4.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.4.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.4.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.4.norm2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.40.attn.proj.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.40.attn.qkv.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.40.mlp.fc1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.40.mlp.fc2.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.40.mlp.fc3.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.40.norm1.weight": "model-00002-of-00002.safetensors",
+    "vision_tower.blocks.40.norm2.weight": "model-00002-of-00002.safetensors",
"vision_tower.blocks.41.attn.proj.weight": "model-00002-of-00002.safetensors",
|
| 598 |
+
"vision_tower.blocks.41.attn.qkv.weight": "model-00002-of-00002.safetensors",
|
| 599 |
+
"vision_tower.blocks.41.mlp.fc1.weight": "model-00002-of-00002.safetensors",
|
| 600 |
+
"vision_tower.blocks.41.mlp.fc2.weight": "model-00002-of-00002.safetensors",
|
| 601 |
+
"vision_tower.blocks.41.mlp.fc3.weight": "model-00002-of-00002.safetensors",
|
| 602 |
+
"vision_tower.blocks.41.norm1.weight": "model-00002-of-00002.safetensors",
|
| 603 |
+
"vision_tower.blocks.41.norm2.weight": "model-00002-of-00002.safetensors",
|
| 604 |
+
"vision_tower.blocks.5.attn.proj.weight": "model-00002-of-00002.safetensors",
|
| 605 |
+
"vision_tower.blocks.5.attn.qkv.weight": "model-00002-of-00002.safetensors",
|
| 606 |
+
"vision_tower.blocks.5.mlp.fc1.weight": "model-00002-of-00002.safetensors",
|
| 607 |
+
"vision_tower.blocks.5.mlp.fc2.weight": "model-00002-of-00002.safetensors",
|
| 608 |
+
"vision_tower.blocks.5.mlp.fc3.weight": "model-00002-of-00002.safetensors",
|
| 609 |
+
"vision_tower.blocks.5.norm1.weight": "model-00002-of-00002.safetensors",
|
| 610 |
+
"vision_tower.blocks.5.norm2.weight": "model-00002-of-00002.safetensors",
|
| 611 |
+
"vision_tower.blocks.6.attn.proj.weight": "model-00002-of-00002.safetensors",
|
| 612 |
+
"vision_tower.blocks.6.attn.qkv.weight": "model-00002-of-00002.safetensors",
|
| 613 |
+
"vision_tower.blocks.6.mlp.fc1.weight": "model-00002-of-00002.safetensors",
|
| 614 |
+
"vision_tower.blocks.6.mlp.fc2.weight": "model-00002-of-00002.safetensors",
|
| 615 |
+
"vision_tower.blocks.6.mlp.fc3.weight": "model-00002-of-00002.safetensors",
|
| 616 |
+
"vision_tower.blocks.6.norm1.weight": "model-00002-of-00002.safetensors",
|
| 617 |
+
"vision_tower.blocks.6.norm2.weight": "model-00002-of-00002.safetensors",
|
| 618 |
+
"vision_tower.blocks.7.attn.proj.weight": "model-00002-of-00002.safetensors",
|
| 619 |
+
"vision_tower.blocks.7.attn.qkv.weight": "model-00002-of-00002.safetensors",
|
| 620 |
+
"vision_tower.blocks.7.mlp.fc1.weight": "model-00002-of-00002.safetensors",
|
| 621 |
+
"vision_tower.blocks.7.mlp.fc2.weight": "model-00002-of-00002.safetensors",
|
| 622 |
+
"vision_tower.blocks.7.mlp.fc3.weight": "model-00002-of-00002.safetensors",
|
| 623 |
+
"vision_tower.blocks.7.norm1.weight": "model-00002-of-00002.safetensors",
|
| 624 |
+
"vision_tower.blocks.7.norm2.weight": "model-00002-of-00002.safetensors",
|
| 625 |
+
"vision_tower.blocks.8.attn.proj.weight": "model-00002-of-00002.safetensors",
|
| 626 |
+
"vision_tower.blocks.8.attn.qkv.weight": "model-00002-of-00002.safetensors",
|
| 627 |
+
"vision_tower.blocks.8.mlp.fc1.weight": "model-00002-of-00002.safetensors",
|
| 628 |
+
"vision_tower.blocks.8.mlp.fc2.weight": "model-00002-of-00002.safetensors",
|
| 629 |
+
"vision_tower.blocks.8.mlp.fc3.weight": "model-00002-of-00002.safetensors",
|
| 630 |
+
"vision_tower.blocks.8.norm1.weight": "model-00002-of-00002.safetensors",
|
| 631 |
+
"vision_tower.blocks.8.norm2.weight": "model-00002-of-00002.safetensors",
|
| 632 |
+
"vision_tower.blocks.9.attn.proj.weight": "model-00002-of-00002.safetensors",
|
| 633 |
+
"vision_tower.blocks.9.attn.qkv.weight": "model-00002-of-00002.safetensors",
|
| 634 |
+
"vision_tower.blocks.9.mlp.fc1.weight": "model-00002-of-00002.safetensors",
|
| 635 |
+
"vision_tower.blocks.9.mlp.fc2.weight": "model-00002-of-00002.safetensors",
|
| 636 |
+
"vision_tower.blocks.9.mlp.fc3.weight": "model-00002-of-00002.safetensors",
|
| 637 |
+
"vision_tower.blocks.9.norm1.weight": "model-00002-of-00002.safetensors",
|
| 638 |
+
"vision_tower.blocks.9.norm2.weight": "model-00002-of-00002.safetensors",
|
| 639 |
+
"vision_tower.merger.ln_q.bias": "model-00002-of-00002.safetensors",
|
| 640 |
+
"vision_tower.merger.ln_q.weight": "model-00002-of-00002.safetensors",
|
| 641 |
+
"vision_tower.merger.mlp.0.bias": "model-00002-of-00002.safetensors",
|
| 642 |
+
"vision_tower.merger.mlp.0.weight": "model-00002-of-00002.safetensors",
|
| 643 |
+
"vision_tower.merger.mlp.2.bias": "model-00002-of-00002.safetensors",
|
| 644 |
+
"vision_tower.merger.mlp.2.weight": "model-00002-of-00002.safetensors",
|
| 645 |
+
"vision_tower.patch_embed.patchifier.norm.weight": "model-00002-of-00002.safetensors",
|
| 646 |
+
"vision_tower.patch_embed.patchifier.proj.bias": "model-00002-of-00002.safetensors",
|
| 647 |
+
"vision_tower.patch_embed.patchifier.proj.weight": "model-00002-of-00002.safetensors",
|
| 648 |
+
"vision_tower.post_trunk_norm.weight": "model-00002-of-00002.safetensors"
|
| 649 |
+
}
|
| 650 |
+
}
|
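The tail of the index above closes the `weight_map`, so every `vision_tower.*` tensor resolves to the second shard. A minimal lookup sketch, assuming the checkpoint has been downloaded to a local directory named `./dots-ocr` (an illustrative path, not part of this repo):

import json

# Resolve which shard file holds a given tensor.
with open("./dots-ocr/model.safetensors.index.json") as f:
    index = json.load(f)

print(index["weight_map"]["vision_tower.post_trunk_norm.weight"])
# -> model-00002-of-00002.safetensors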
modeling_dots_ocr.py
ADDED
@@ -0,0 +1,131 @@
from typing import List, Optional, Tuple, Union

import torch
from transformers.modeling_outputs import CausalLMOutputWithPast
from transformers.models.qwen2 import Qwen2ForCausalLM

from .configuration_dots import DotsVisionConfig, DotsOCRConfig
from .modeling_dots_vision import DotsVisionTransformer


DOTS_VLM_MAX_IMAGES = 200


class DotsOCRForCausalLM(Qwen2ForCausalLM):
    config_class = DotsOCRConfig

    def __init__(self, config: DotsOCRConfig):
        super().__init__(config)

        if isinstance(self.config.vision_config, dict):
            vision_config = DotsVisionConfig(**self.config.vision_config)
            self.config.vision_config = vision_config
        else:
            vision_config = self.config.vision_config

        self.vision_tower = DotsVisionTransformer(vision_config)

    def prepare_inputs_embeds(
        self,
        input_ids: torch.LongTensor,
        pixel_values: Optional[torch.FloatTensor] = None,
        grid_thw: Optional[torch.FloatTensor] = None,
        img_mask: Optional[torch.BoolTensor] = None,
    ) -> torch.Tensor:
        inputs_embeds = self.get_input_embeddings()(input_ids)

        if pixel_values is not None:
            assert img_mask is not None
            if grid_thw.shape[0] > DOTS_VLM_MAX_IMAGES:
                print(
                    f"Num image exceeded: {grid_thw.shape[0]} > {DOTS_VLM_MAX_IMAGES}, which may cause FSDP hang"
                )

            vision_embeddings = self.vision_tower(pixel_values, grid_thw)

            true_indices = torch.nonzero(img_mask).squeeze()
            if len(true_indices) > vision_embeddings.size(0):
                print(
                    f"img_mask sum > VE and will be truncated, mask.sum()={len(true_indices)} {vision_embeddings.size(0)=}"
                )
                true_indices = true_indices[: vision_embeddings.size(0)]
                new_img_mask = torch.zeros_like(img_mask, device=img_mask.device)
                new_img_mask[true_indices[:, 0], true_indices[:, 1]] = True
            else:
                new_img_mask = img_mask

            assert (
                vision_embeddings.size(0) == new_img_mask.sum()
            ), f"{vision_embeddings.size(0)=}, {new_img_mask.sum()=}"

            inputs_embeds = inputs_embeds.masked_scatter(
                new_img_mask.to(inputs_embeds.device).unsqueeze(-1).expand_as(inputs_embeds),
                vision_embeddings.to(inputs_embeds.device).type(inputs_embeds.dtype),
            )

        return inputs_embeds

    def forward(
        self,
        input_ids: torch.LongTensor,
        pixel_values: Optional[torch.FloatTensor] = None,
        image_grid_thw: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[List[torch.FloatTensor]] = None,
        labels: Optional[torch.LongTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        use_cache: Optional[bool] = None,
        logits_to_keep: int = 0,
        **loss_kwargs,
    ) -> Union[Tuple, CausalLMOutputWithPast]:
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        assert len(input_ids) >= 1, f"empty input_ids {input_ids.shape=} will cause gradnorm nan"
        if inputs_embeds is None:
            img_mask = input_ids == self.config.image_token_id
            inputs_embeds = self.prepare_inputs_embeds(input_ids, pixel_values, image_grid_thw, img_mask)

        outputs = super().forward(
            inputs_embeds=inputs_embeds,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            labels=labels,
            use_cache=use_cache if use_cache is not None else self.config.use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            # return_dict=return_dict,
            logits_to_keep=logits_to_keep,
            **loss_kwargs,
        )

        return outputs

    def prepare_inputs_for_generation(
        self,
        input_ids,
        past_key_values=None,
        inputs_embeds=None,
        pixel_values=None,
        attention_mask=None,
        cache_position=None,
        num_logits_to_keep=None,
        **kwargs,
    ):
        model_inputs = super().prepare_inputs_for_generation(
            input_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            num_logits_to_keep=num_logits_to_keep,
            **kwargs,
        )

        if cache_position[0] == 0:
            model_inputs["pixel_values"] = pixel_values

        return model_inputs
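The key step in `prepare_inputs_embeds` above is the `masked_scatter` call, which overwrites the embeddings at image-placeholder positions with the vision tower's output, in order. A toy-shape sketch of that semantics (the dimensions are illustrative, not the model's real sizes):

import torch

inputs_embeds = torch.zeros(1, 5, 4)                 # (batch, seq_len, hidden)
img_mask = torch.tensor([[False, True, True, False, False]])
vision_embeddings = torch.ones(2, 4)                 # one row per image token

merged = inputs_embeds.masked_scatter(
    img_mask.unsqueeze(-1).expand_as(inputs_embeds),
    vision_embeddings.type(inputs_embeds.dtype),
)
print(merged[0, 1], merged[0, 3])                    # ones at masked slots, zeros elsewhere

And a minimal loading sketch for this class, assuming the checkpoint sits at `./dots-ocr` and that `AutoModelForCausalLM` resolves `DotsOCRForCausalLM` through the repo's bundled custom code (hence `trust_remote_code=True`). Real OCR calls would also pass `pixel_values` and `image_grid_thw` produced by the image processor; this is only a text-side smoke test:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained(
    "./dots-ocr", torch_dtype=torch.bfloat16, trust_remote_code=True
).eval()
tokenizer = AutoTokenizer.from_pretrained("./dots-ocr", trust_remote_code=True)

inputs = tokenizer("Hello", return_tensors="pt")     # text-only smoke test
out = model.generate(**inputs, max_new_tokens=8)
print(tokenizer.decode(out[0], skip_special_tokens=True))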
modeling_dots_ocr_vllm.py
ADDED
@@ -0,0 +1,451 @@
from functools import cached_property
from typing import Iterable, Literal, Mapping, Optional, Set, Tuple, TypedDict, Union

import torch
import torch.nn as nn
from transformers.models.qwen2_vl import Qwen2VLImageProcessor, Qwen2VLProcessor
from transformers.models.qwen2_vl.image_processing_qwen2_vl import smart_resize
from vllm import ModelRegistry
from vllm.config import VllmConfig
from vllm.model_executor.layers.sampler import SamplerOutput, get_sampler
from vllm.model_executor.models.interfaces import MultiModalEmbeddings, SupportsMultiModal
from vllm.model_executor.models.qwen2 import Qwen2ForCausalLM
from vllm.model_executor.models.qwen2_5_vl import (
    Qwen2_5_VLMultiModalProcessor,
    Qwen2_5_VLProcessingInfo,
)
from vllm.model_executor.models.qwen2_vl import Qwen2VLDummyInputsBuilder
from vllm.model_executor.models.utils import (
    AutoWeightsLoader,
    WeightsMapper,
    init_vllm_registered_model,
    maybe_prefix,
    merge_multimodal_embeddings,
)
from vllm.model_executor.sampling_metadata import SamplingMetadata
from vllm.multimodal import MULTIMODAL_REGISTRY
from vllm.multimodal.inputs import MultiModalDataDict
from vllm.multimodal.parse import ImageSize
from vllm.sequence import IntermediateTensors

from .configuration_dots import DotsVisionConfig, DotsOCRConfig
from .modeling_dots_vision import DotsVisionTransformer


class DotsOCRImagePixelInputs(TypedDict):
    type: Literal["pixel_values", "image_grid_thw"]

    pixel_values: torch.Tensor
    image_grid_thw: torch.Tensor


class DotsOCRImageEmbeddingInputs(TypedDict):
    type: Literal["image_embeds", "image_grid_thw"]
    image_embeds: torch.Tensor
    """Supported types:
    - List[`torch.Tensor`]: A list of tensors holding all images' features.
        Each tensor holds an image's features.
    - `torch.Tensor`: A tensor holding all images' features
        (concatenation of all images' feature tensors).

    Tensor shape: `(num_image_features, hidden_size)`
    - `num_image_features` varies based on
        the number and resolution of the images.
    - `hidden_size` must match the hidden size of the language model backbone.
    """

    image_grid_thw: torch.Tensor


DotsOCRImageInputs = Union[DotsOCRImagePixelInputs, DotsOCRImageEmbeddingInputs]


class DotsOCRMultiModalProcessor(Qwen2_5_VLMultiModalProcessor):
    pass


class DotsOCRDummyInputsBuilder(Qwen2VLDummyInputsBuilder):
    def get_dummy_mm_data(
        self,
        seq_len: int,
        mm_counts: Mapping[str, int],
    ) -> MultiModalDataDict:
        num_images = mm_counts.get("image", 0)

        target_width, target_height = self.info.get_image_size_with_most_features()

        return {
            "image": self._get_dummy_images(width=target_width, height=target_height, num_images=num_images),
        }


class DotsOCRProcessingInfo(Qwen2_5_VLProcessingInfo):
    def get_hf_config(self) -> DotsOCRConfig:
        config = self.ctx.get_hf_config()
        if config.__class__.__name__ != "DotsOCRConfig":
            raise TypeError(f"Expected DotsOCRConfig, got {type(config)}")

        if hasattr(config, "vision_config") and isinstance(config.vision_config, dict):
            config.vision_config = DotsVisionConfig(**config.vision_config)

        return config

    def get_supported_mm_limits(self) -> Mapping[str, Optional[int]]:
        return {"image": None, "video": 0}

    def get_mm_max_tokens_per_item(
        self,
        seq_len: int,
        mm_counts: Mapping[str, int],
    ) -> Mapping[str, int]:
        max_image_tokens = self.get_max_image_tokens()
        return {"image": max_image_tokens, "video": 0}

    def get_hf_processor(
        self,
        *,
        min_pixels: Optional[int] = None,
        max_pixels: Optional[int] = None,
        size: Optional[dict[str, int]] = None,
        **kwargs: object,
    ) -> Qwen2VLProcessor:
        self.get_tokenizer().image_token = "<|imgpad|>"  # Ensure the image token is set
        processor = self.ctx.get_hf_processor(
            Qwen2VLProcessor,
            image_processor=self.get_image_processor(min_pixels=min_pixels, max_pixels=max_pixels, size=size),
            **kwargs,
        )
        processor.image_token = "<|imgpad|>"
        processor.video_token = "<|video_pad|>"
        return processor

    def _get_vision_info(
        self,
        *,
        image_width: int,
        image_height: int,
        num_frames: int = 1,
        do_resize: bool = True,
        image_processor: Optional[Qwen2VLImageProcessor],
    ) -> tuple[ImageSize, int]:
        if image_processor is None:
            image_processor = self.get_image_processor()

        hf_config: DotsOCRConfig = self.get_hf_config()
        vision_config = hf_config.vision_config
        patch_size = vision_config.patch_size
        merge_size = vision_config.spatial_merge_size
        temporal_patch_size = vision_config.temporal_patch_size

        if do_resize:
            resized_height, resized_width = smart_resize(
                height=image_height,
                width=image_width,
                factor=patch_size * merge_size,
                min_pixels=image_processor.min_pixels,
                max_pixels=image_processor.max_pixels,
            )
            preprocessed_size = ImageSize(width=resized_width, height=resized_height)
        else:
            preprocessed_size = ImageSize(width=image_width, height=image_height)

        # NOTE: Frames are padded to be divisible by `temporal_patch_size`
        # https://github.com/huggingface/transformers/blob/v4.48.3/src/transformers/models/qwen2_vl/image_processing_qwen2_vl.py#L294
        padded_num_frames = num_frames + num_frames % temporal_patch_size

        grid_t = max(padded_num_frames // temporal_patch_size, 1)
        grid_h = preprocessed_size.height // patch_size
        grid_w = preprocessed_size.width // patch_size

        num_patches = grid_t * grid_h * grid_w
        num_vision_tokens = num_patches // (merge_size**2)

        return preprocessed_size, num_vision_tokens
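`_get_vision_info` above is where the image-token budget comes from: `smart_resize` snaps the image to a multiple of `patch_size * merge_size`, and the resulting patch grid is collapsed by `merge_size**2`. A worked instance of that arithmetic, with illustrative values (`patch_size=14`, `spatial_merge_size=2`, `temporal_patch_size=2` are assumptions, not values read from this checkpoint's config):

patch_size, merge_size, temporal_patch_size = 14, 2, 2
height, width = 1092, 784      # already multiples of patch_size * merge_size = 28

padded_num_frames = 1 + 1 % temporal_patch_size             # single image -> padded to 2
grid_t = max(padded_num_frames // temporal_patch_size, 1)   # 1
grid_h, grid_w = height // patch_size, width // patch_size  # 78, 56
num_patches = grid_t * grid_h * grid_w                      # 4368
num_vision_tokens = num_patches // (merge_size ** 2)        # 1092
print(num_vision_tokens)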
@MULTIMODAL_REGISTRY.register_processor(
    Qwen2_5_VLMultiModalProcessor,
    info=DotsOCRProcessingInfo,
    dummy_inputs=DotsOCRDummyInputsBuilder,
)
class DotsOCRForCausalLM(nn.Module, SupportsMultiModal):
    hf_to_vllm_mapper = WeightsMapper(
        orig_to_new_prefix={
            "lm_head.": "language_model.lm_head.",
            "model.": "language_model.model.",
        }
    )
    _tp_plan = {}

    @classmethod
    def get_placeholder_str(cls, modality: str, i: int) -> Optional[str]:
        if modality in ("image",):
            return "<|img|><|imgpad|><|endofimg|>"
        return None

    def __init__(self, *, vllm_config: VllmConfig, prefix: str = ""):
        super().__init__()

        self.config: DotsOCRConfig = vllm_config.model_config.hf_config
        self.quant_config = vllm_config.quant_config
        self.multimodal_config = vllm_config.model_config.multimodal_config

        if isinstance(self.config.vision_config, dict):
            vision_config = DotsVisionConfig(**self.config.vision_config)
            self.config.vision_config = vision_config
        else:
            vision_config = self.config.vision_config

        self.vision_tower = DotsVisionTransformer(vision_config)
        self.language_model: Qwen2ForCausalLM = init_vllm_registered_model(
            vllm_config=vllm_config,
            hf_config=self.config,
            prefix=maybe_prefix(prefix, "language_model"),
            architectures=["Qwen2ForCausalLM"],
        )

    @cached_property
    def sampler(self):
        if hasattr(self.language_model, "sampler"):
            return self.language_model.sampler

        return get_sampler()

    def _validate_and_reshape_mm_tensor(self, mm_input: object, name: str) -> torch.Tensor:
        if not isinstance(mm_input, (torch.Tensor, list)):
            raise ValueError(f"Incorrect type of {name}. Got type: {type(mm_input)}")
        if isinstance(mm_input, torch.Tensor):
            if mm_input.ndim == 2:
                return mm_input
            if mm_input.ndim != 3:
                raise ValueError(
                    f"{name} should be 2D or batched 3D tensor. "
                    f"Got ndim: {mm_input.ndim} "
                    f"(shape={mm_input.shape})"
                )
            return torch.concat(list(mm_input))
        else:
            return torch.concat(mm_input)

    def _parse_and_validate_image_input(self, **kwargs: object) -> Optional[DotsOCRImageInputs]:
        pixel_values = kwargs.pop("pixel_values", None)
        image_embeds = kwargs.pop("image_embeds", None)
        image_grid_thw = kwargs.pop("image_grid_thw", None)

        if pixel_values is None and image_embeds is None:
            return None

        if pixel_values is not None:
            pixel_values = self._validate_and_reshape_mm_tensor(pixel_values, "image pixel values")
            image_grid_thw = self._validate_and_reshape_mm_tensor(image_grid_thw, "image grid_thw")

            if not isinstance(pixel_values, (torch.Tensor, list)):
                raise ValueError(f"Incorrect type of image pixel values. Got type: {type(pixel_values)}")

            return DotsOCRImagePixelInputs(
                type="pixel_values", pixel_values=pixel_values, image_grid_thw=image_grid_thw
            )

        if image_embeds is not None:
            image_embeds = self._validate_and_reshape_mm_tensor(image_embeds, "image embeds")
            image_grid_thw = self._validate_and_reshape_mm_tensor(image_grid_thw, "image grid_thw")

            if not isinstance(image_embeds, torch.Tensor):
                raise ValueError(f"Incorrect type of image embeddings. Got type: {type(image_embeds)}")
            return DotsOCRImageEmbeddingInputs(
                type="image_embeds", image_embeds=image_embeds, image_grid_thw=image_grid_thw
            )

    def vision_forward(self, pixel_values: torch.Tensor, image_grid_thw: torch.Tensor):
        from vllm.distributed import (
            get_tensor_model_parallel_group,
            get_tensor_model_parallel_rank,
            get_tensor_model_parallel_world_size,
        )

        assert self.vision_tower is not None

        tp_rank = get_tensor_model_parallel_rank()
        tp = get_tensor_model_parallel_world_size()

        image_grid_thw_chunk = image_grid_thw.chunk(tp)
        image_sizes_consum = torch.tensor([i.prod(-1).sum() for i in image_grid_thw_chunk]).cumsum(dim=0)
        merge_size_square = self.vision_tower.config.spatial_merge_size**2
        image_embedding = torch.zeros(
            (
                pixel_values.shape[0] // merge_size_square,
                self.vision_tower.config.hidden_size,
            ),
            device=pixel_values.device,
            dtype=pixel_values.dtype,
        )

        if tp_rank < len(image_sizes_consum):
            idx_start = 0 if tp_rank == 0 else image_sizes_consum[tp_rank - 1].item()
            idx_end = image_sizes_consum[tp_rank].item()
            pixel_values_part = pixel_values[idx_start:idx_end]
            image_grid_thw_part = image_grid_thw_chunk[tp_rank]
            image_embedding_part = self.vision_tower(pixel_values_part, image_grid_thw_part)
            image_embedding[idx_start // merge_size_square : idx_end // merge_size_square] = image_embedding_part

        group = get_tensor_model_parallel_group().device_group
        torch.distributed.all_reduce(image_embedding, group=group)
        return image_embedding

    def _process_image_input(self, image_input: DotsOCRImageInputs) -> tuple[torch.Tensor, ...]:
        grid_thw = image_input["image_grid_thw"]
        assert grid_thw.ndim == 2

        if image_input["type"] == "image_embeds":
            image_embeds = image_input["image_embeds"].type(self.vision_tower.dtype)
        else:
            pixel_values = image_input["pixel_values"].type(self.vision_tower.dtype)
            image_embeds = self.vision_forward(pixel_values, grid_thw)[:, : self.config.hidden_size]

        # Split concatenated embeddings for each image item.
        merge_size = self.vision_tower.config.spatial_merge_size
        sizes = grid_thw.prod(-1) // merge_size // merge_size

        return image_embeds.split(sizes.tolist())

    def _parse_and_validate_multimodal_inputs(self, **kwargs: object) -> dict:
        modalities = {}

        # Preserve the order of modalities if there are multiple of them
        # from the order of kwargs.
        for input_key in kwargs:
            if input_key in ("pixel_values", "image_embeds") and "images" not in modalities:
                modalities["images"] = self._parse_and_validate_image_input(**kwargs)
        return modalities

    def get_language_model(self) -> torch.nn.Module:
        return self.language_model

    def get_multimodal_embeddings(self, **kwargs: object) -> Optional[MultiModalEmbeddings]:
        modalities = self._parse_and_validate_multimodal_inputs(**kwargs)
        if not modalities:
            return None

        # The resulting multimodal_embeddings is a tuple of tensors, with each
        # tensor corresponding to a multimodal data item (image or video).
        multimodal_embeddings: tuple[torch.Tensor, ...] = ()

        # NOTE: It is important to iterate over the keys in this dictionary
        # to preserve the order of the modalities.
        for modality in modalities:
            if modality == "images":
                image_input = modalities["images"]
                vision_embeddings = self._process_image_input(image_input)
                multimodal_embeddings += vision_embeddings

        return multimodal_embeddings

    def get_input_embeddings(
        self,
        input_ids: torch.Tensor,
        multimodal_embeddings: Optional[MultiModalEmbeddings] = None,
    ) -> torch.Tensor:
        inputs_embeds = self.language_model.get_input_embeddings(input_ids)
        if multimodal_embeddings is not None:
            inputs_embeds = merge_multimodal_embeddings(
                input_ids,
                inputs_embeds,
                multimodal_embeddings,
                [self.config.image_token_id, self.config.video_token_id],
            )

        return inputs_embeds

    def get_input_embeddings_v0(
        self,
        input_ids: torch.Tensor,
        image_input: Optional[DotsOCRImagePixelInputs] = None,
    ) -> torch.Tensor:
        inputs_embeds = self.get_input_embeddings(input_ids)
        if image_input is not None:
            image_embeds = self._process_image_input(image_input)
            inputs_embeds = merge_multimodal_embeddings(
                input_ids,
                inputs_embeds,
                image_embeds,
                placeholder_token_id=self.config.image_token_id,
            )
        return inputs_embeds

    def forward(
        self,
        input_ids: Optional[torch.Tensor],
        positions: torch.Tensor,
        intermediate_tensors: Optional[IntermediateTensors] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        **kwargs,
    ) -> Union[torch.Tensor, IntermediateTensors]:
        if intermediate_tensors is not None:
            inputs_embeds = None
        elif inputs_embeds is None and kwargs.get("pixel_values") is not None:
            image_input = self._parse_and_validate_image_input(**kwargs)
            if image_input is None:
                inputs_embeds = None
            else:
                assert input_ids is not None
                inputs_embeds = self.get_input_embeddings_v0(
                    input_ids,
                    image_input=image_input,
                )
                input_ids = None

        hidden_states = self.language_model(
            input_ids=input_ids,
            positions=positions,
            intermediate_tensors=intermediate_tensors,
            inputs_embeds=inputs_embeds,
        )

        return hidden_states

    def compute_logits(
        self,
        hidden_states: torch.Tensor,
        sampling_metadata: SamplingMetadata,
    ) -> Optional[torch.Tensor]:
        return self.language_model.compute_logits(hidden_states, sampling_metadata)

    def sample(
        self,
        logits: Optional[torch.Tensor],
        sampling_metadata: SamplingMetadata,
    ) -> Optional[SamplerOutput]:
        next_tokens = self.sampler(logits, sampling_metadata)
        return next_tokens

    def load_weights(self, weights: Iterable[Tuple[str, torch.Tensor]]) -> Set[str]:
        loader = AutoWeightsLoader(self)
        return loader.load_weights(weights, mapper=self.hf_to_vllm_mapper)


def patch_vllm_chat_placeholder():
    import vllm

    # Return early when the vllm version is > 0.9.1
    if not (vllm.__version_tuple__[0] == 0 and vllm.__version_tuple__[1] <= 9 and vllm.__version_tuple__[2] <= 1):
        return
    from vllm.entrypoints.chat_utils import BaseMultiModalItemTracker

    ori = BaseMultiModalItemTracker._placeholder_str

    def _placeholder_str(self, modality, current_count: int) -> Optional[str]:
        hf_config = self._model_config.hf_config
        model_type = hf_config.model_type
        if modality in ("image",) and model_type in ["dots_ocr"]:
            return "<|img|><|imgpad|><|endofimg|>"
        return ori(self, modality, current_count)

    BaseMultiModalItemTracker._placeholder_str = _placeholder_str


ModelRegistry.register_model(
    "DotsOCRForCausalLM", DotsOCRForCausalLM,
)


patch_vllm_chat_placeholder()
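`vision_forward` above shards the packed patch sequence across tensor-parallel ranks by chunking `image_grid_thw`, writing each rank's slice into a zero-initialized buffer, and summing the buffers with `all_reduce`. The index bookkeeping can be checked without any distributed setup (toy grid values, purely illustrative):

import torch

image_grid_thw = torch.tensor([[1, 4, 4], [1, 2, 2], [1, 6, 4]])  # three images
tp = 2
chunks = image_grid_thw.chunk(tp)          # rank 0 gets images 0-1, rank 1 gets image 2
consum = torch.tensor([c.prod(-1).sum() for c in chunks]).cumsum(dim=0)
print(consum)                              # tensor([20, 44]): patch-row offsets per rank
# Rank 0 runs the vision tower on pixel rows [0:20), rank 1 on [20:44);
# the all_reduce then sums the partially filled buffers into one full embedding.

Once this module is imported (which runs `ModelRegistry.register_model` and the chat-placeholder patch), serving goes through vLLM's usual entry points. A minimal offline smoke test, assuming the checkpoint lives at `./dots-ocr` (an illustrative path); image requests would normally go through the chat API, where the `<|img|><|imgpad|><|endofimg|>` placeholder is inserted automatically:

from vllm import LLM, SamplingParams

llm = LLM(model="./dots-ocr", trust_remote_code=True)
out = llm.generate(["Hello"], SamplingParams(temperature=0.0, max_tokens=16))
print(out[0].outputs[0].text)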
modeling_dots_vision.py
ADDED
@@ -0,0 +1,520 @@
import math

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.checkpoint

flash_attn_available = True
npu_available = True

try:
    from flash_attn import flash_attn_varlen_func
except ImportError:
    flash_attn_available = False

from torch.nn import LayerNorm
from transformers.modeling_utils import PreTrainedModel
from .configuration_dots import DotsVisionConfig

try:
    import torch_npu
except ImportError:
    npu_available = False


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2:]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb_vision(tensor: torch.Tensor, freqs: torch.Tensor) -> torch.Tensor:
    orig_dtype = tensor.dtype
    tensor = tensor.float()

    cos = freqs.cos()
    sin = freqs.sin()

    cos = cos.unsqueeze(1).repeat(1, 1, 2).unsqueeze(0).float()
    sin = sin.unsqueeze(1).repeat(1, 1, 2).unsqueeze(0).float()

    output = (tensor * cos) + (rotate_half(tensor) * sin)

    output = output.to(orig_dtype)

    return output


class VisionRotaryEmbedding(nn.Module):
    def __init__(self, dim: int, theta: float = 10000.0) -> None:
        super().__init__()
        inv_freq = 1.0 / (theta ** (torch.arange(0, dim, 2, dtype=torch.float) / dim))
        self.register_buffer("inv_freq", inv_freq, persistent=False)

    def forward(self, seqlen: int) -> torch.Tensor:
        seq = torch.arange(seqlen, device=self.inv_freq.device, dtype=self.inv_freq.dtype)
        freqs = torch.outer(seq, self.inv_freq)
        return freqs
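A shape sketch for the rotary helpers above (`head_dim=8` and the sequence length are illustrative; the real head size comes from the vision config, and the transformer builds the per-patch frequency table by concatenating height and width components, which the `torch.cat` below stands in for):

import torch

head_dim = 8
rotary = VisionRotaryEmbedding(head_dim // 2)    # inv_freq has head_dim // 4 entries
freqs_1d = rotary(seqlen=6)                      # (6, head_dim // 4)
freqs = torch.cat([freqs_1d, freqs_1d], dim=-1)  # (6, head_dim // 2), stand-in for h/w concat

q = torch.randn(6, 4, head_dim)                  # (seq_len, num_heads, head_dim)
q_rot = apply_rotary_pos_emb_vision(q.unsqueeze(0), freqs).squeeze(0)
print(q_rot.shape)                               # unchanged: torch.Size([6, 4, 8])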
class PatchMerger(nn.Module):
    def __init__(
        self,
        dim: int,
        context_dim: int,
        spatial_merge_size: int = 2,
        pre_norm="layernorm",
        init_merger_std=None,
    ) -> None:
        super().__init__()
        self.hidden_size = context_dim * (spatial_merge_size ** 2)
        self.pre_norm = pre_norm
        if self.pre_norm == "layernorm":
            self.ln_q = LayerNorm(context_dim, eps=1e-6)
        elif self.pre_norm == "rmsnorm":
            self.ln_q = RMSNorm(context_dim, eps=1e-6)
        else:
            print("no norm in patch merger")

        self.mlp = nn.Sequential(
            nn.Linear(self.hidden_size, self.hidden_size),
            nn.GELU(),
            nn.Linear(self.hidden_size, dim),
        )

        if init_merger_std is not None:
            nn.init.normal_(self.mlp[0].weight, mean=0.0, std=init_merger_std)
            nn.init.zeros_(self.mlp[0].bias)
            nn.init.normal_(self.mlp[2].weight, mean=0.0, std=init_merger_std)
            nn.init.zeros_(self.mlp[2].bias)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        if self.pre_norm:
            x = self.mlp(self.ln_q(x).view(-1, self.hidden_size))
        else:
            x = self.mlp(x.view(-1, self.hidden_size))
        return x
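A shape sketch for `PatchMerger` (`context_dim=64`, `dim=128` are illustrative): the `view(-1, hidden_size)` groups every `spatial_merge_size**2` consecutive tokens, which the surrounding transformer orders so that each group is a spatial 2x2 neighbourhood:

import torch

merger = PatchMerger(dim=128, context_dim=64, spatial_merge_size=2)
patches = torch.randn(16, 64)   # 16 patch tokens, e.g. a 4x4 grid
merged = merger(patches)        # 2x2 groups -> 4 merged tokens
print(merged.shape)             # torch.Size([4, 128])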
class VisionAttention(nn.Module):
    def __init__(self, config, dim: int, num_heads: int = 16, bias=True) -> None:
        super().__init__()
        self.num_heads = num_heads
        self.head_dim = dim // num_heads
        self.qkv = nn.Linear(dim, dim * 3, bias=bias)
        self.proj = nn.Linear(dim, dim, bias=bias)

    def forward(
        self,
        hidden_states: torch.Tensor,
        cu_seqlens: torch.Tensor,
        rotary_pos_emb: torch.Tensor = None,
    ) -> torch.Tensor:
        seq_length = hidden_states.shape[0]

        q, k, v = self.qkv(hidden_states).reshape(seq_length, 3, self.num_heads, -1).permute(1, 0, 2, 3).unbind(0)
        q = apply_rotary_pos_emb_vision(q.unsqueeze(0), rotary_pos_emb).squeeze(0)
        k = apply_rotary_pos_emb_vision(k.unsqueeze(0), rotary_pos_emb).squeeze(0)

        attention_mask = torch.full(
            [1, seq_length, seq_length], torch.finfo(q.dtype).min, device=q.device, dtype=q.dtype
        )
        for i in range(1, len(cu_seqlens)):
            attention_mask[..., cu_seqlens[i - 1]: cu_seqlens[i], cu_seqlens[i - 1]: cu_seqlens[i]] = 0

        q = q.transpose(0, 1)
        k = k.transpose(0, 1)
        v = v.transpose(0, 1)
        attn_weights = torch.matmul(q, k.transpose(1, 2)) / math.sqrt(self.head_dim)
        attn_weights = attn_weights + attention_mask
        attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(q.dtype)
        attn_output = torch.matmul(attn_weights, v)
        attn_output = attn_output.transpose(0, 1)
        attn_output = attn_output.reshape(seq_length, -1)
        attn_output = self.proj(attn_output)
        return attn_output
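The eager path's mask is block-diagonal over `cu_seqlens`, so patches of different packed images never attend to each other. A small standalone sketch (toy lengths; the real values derive from `grid_thw`):

import torch

cu_seqlens = torch.tensor([0, 3, 5])   # two packed images, lengths 3 and 2
seq_length = int(cu_seqlens[-1])
mask = torch.full([1, seq_length, seq_length], torch.finfo(torch.float32).min)
for i in range(1, len(cu_seqlens)):
    mask[..., cu_seqlens[i - 1]:cu_seqlens[i], cu_seqlens[i - 1]:cu_seqlens[i]] = 0
print(mask[0])  # zeros inside the 3x3 and 2x2 diagonal blocks, large negatives elsewhere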
class VisionFlashAttention2(nn.Module):
    def __init__(self, config, dim: int, num_heads: int = 16, bias=True) -> None:
        super().__init__()
        self.num_heads = num_heads
        self.qkv = nn.Linear(dim, dim * 3, bias=bias)
        self.proj = nn.Linear(dim, dim, bias=bias)
        self.config = config
        self.is_causal = config.is_causal

    def forward(
        self,
        hidden_states: torch.Tensor,
        cu_seqlens: torch.Tensor,
        rotary_pos_emb: torch.Tensor = None,
    ) -> torch.Tensor:
        seq_length = hidden_states.shape[0]
        q, k, v = (
            self.qkv(hidden_states).reshape(seq_length, 3, self.num_heads, -1).permute(1, 0, 2, 3).unbind(0)
        )  # 'shd'
        q = apply_rotary_pos_emb_vision(q.unsqueeze(0), rotary_pos_emb).squeeze(0)
        k = apply_rotary_pos_emb_vision(k.unsqueeze(0), rotary_pos_emb).squeeze(0)
        max_seqlen = (cu_seqlens[1:] - cu_seqlens[:-1]).max().item()
        attn_output = flash_attn_varlen_func(
            q, k, v, cu_seqlens, cu_seqlens, max_seqlen, max_seqlen, causal=self.is_causal
        ).reshape(seq_length, -1)
        attn_output = self.proj(attn_output)

        return attn_output


class VisionAttentionV2(nn.Module):
    def __init__(self, config, dim: int, num_heads: int = 16, bias=True) -> None:
        super().__init__()
        self.num_heads = num_heads
        self.head_dim = dim // num_heads
        self.qkv = nn.Linear(dim, dim * 3, bias=bias)
        self.proj = nn.Linear(dim, dim, bias=bias)

    def forward(
        self,
        hidden_states: torch.Tensor,
        cu_seqlens: torch.Tensor,
        rotary_pos_emb: torch.Tensor = None,
    ) -> torch.Tensor:
        seq_length = hidden_states.shape[0]

        q, k, v = self.qkv(hidden_states).reshape(seq_length, 3, self.num_heads, -1).permute(1, 0, 2, 3).unbind(0)
        q = apply_rotary_pos_emb_vision(q.unsqueeze(0), rotary_pos_emb).squeeze(0)
        k = apply_rotary_pos_emb_vision(k.unsqueeze(0), rotary_pos_emb).squeeze(0)

        seqlens = torch.diff(cu_seqlens).tolist()

        q_list = torch.split(q, seqlens, 0)
        k_list = torch.split(k, seqlens, 0)
        v_list = torch.split(v, seqlens, 0)
        # Eager attention has O(n^2) memory complexity, where n = b*s
        # (batch_size * seq_len), so very long sequences easily OOM. This
        # implementation splits the sequence per batch item to reduce memory,
        # at some speed cost relative to continuous batching.
        outputs = []
        for q_i, k_i, v_i in zip(q_list, k_list, v_list):
            q_i = q_i.transpose(0, 1)
            k_i = k_i.transpose(0, 1)
            v_i = v_i.transpose(0, 1)
            out = torch.matmul(q_i, k_i.transpose(1, 2)) / math.sqrt(self.head_dim)
            out = nn.functional.softmax(out, dim=-1, dtype=torch.float32).to(q.dtype)
            out = torch.matmul(out, v_i)
            out = out.transpose(0, 1)
            outputs.append(out)

        attn_output = torch.concat(outputs, dim=0)
        attn_output = attn_output.reshape(seq_length, -1)
        attn_output = self.proj(attn_output)
        return attn_output


class VisionAscendAttention(nn.Module):
    def __init__(self, config, dim: int, num_heads: int = 16, bias=True) -> None:
        super().__init__()
        self.num_heads = num_heads
        self.head_dim = dim // num_heads
        self.qkv = nn.Linear(dim, dim * 3, bias=bias)
        self.proj = nn.Linear(dim, dim, bias=bias)
        self.config = config

    def forward(
        self,
        hidden_states: torch.Tensor,
        cu_seqlens: torch.Tensor,
        rotary_pos_emb: torch.Tensor = None,
    ) -> torch.Tensor:
        seq_length = hidden_states.shape[0]
        q, k, v = self.qkv(hidden_states).reshape(seq_length, 3, self.num_heads, -1).permute(1, 0, 2, 3).unbind(0)

        q = apply_rotary_pos_emb_vision(q.unsqueeze(0), rotary_pos_emb).squeeze(0)
        k = apply_rotary_pos_emb_vision(k.unsqueeze(0), rotary_pos_emb).squeeze(0)

        attention_mask = torch.ones([1, seq_length, seq_length], device=q.device, dtype=torch.bool)
        for i in range(1, len(cu_seqlens)):
            attention_mask[..., cu_seqlens[i - 1]: cu_seqlens[i], cu_seqlens[i - 1]: cu_seqlens[i]] = False

        q = q.transpose(0, 1).unsqueeze(0)
        k = k.transpose(0, 1).unsqueeze(0)
        v = v.transpose(0, 1).unsqueeze(0)

        attn_output = torch_npu.npu_prompt_flash_attention(q, k, v,
                                                           atten_mask=attention_mask,
                                                           num_heads=self.num_heads, input_layout="BNSD",
                                                           scale_value=self.head_dim ** -0.5)
        attn_output = attn_output.squeeze(0).transpose(0, 1)
        attn_output = attn_output.reshape(seq_length, -1)
        attn_output = self.proj(attn_output)
        return attn_output


class VisionSdpaAttention(nn.Module):
    def __init__(self, config, dim: int, num_heads: int = 16, bias=True) -> None:
        super().__init__()
        self.num_heads = num_heads
        self.qkv = nn.Linear(dim, dim * 3, bias=bias)
        self.proj = nn.Linear(dim, dim, bias=bias)
        self.config = config

    def forward(
        self,
        hidden_states: torch.Tensor,
        cu_seqlens: torch.Tensor,
        rotary_pos_emb: torch.Tensor = None,
    ) -> torch.Tensor:
        seq_length = hidden_states.shape[0]
        q, k, v = self.qkv(hidden_states).reshape(seq_length, 3, self.num_heads, -1).permute(1, 0, 2, 3).unbind(0)

        q = apply_rotary_pos_emb_vision(q.unsqueeze(0), rotary_pos_emb).squeeze(0)
        k = apply_rotary_pos_emb_vision(k.unsqueeze(0), rotary_pos_emb).squeeze(0)

        attention_mask = torch.zeros([1, seq_length, seq_length], device=q.device, dtype=torch.bool)
        for i in range(1, len(cu_seqlens)):
            attention_mask[..., cu_seqlens[i - 1]: cu_seqlens[i], cu_seqlens[i - 1]: cu_seqlens[i]] = True

        # Convert q, k, v to 4D: (1, num_heads, seq_length, head_dim)
        q = q.transpose(0, 1).unsqueeze(0)
        k = k.transpose(0, 1).unsqueeze(0)
        v = v.transpose(0, 1).unsqueeze(0)

        # See: https://github.com/pytorch/pytorch/issues/127523
        if attention_mask.stride(-1) != 1:
            attention_mask = torch.empty_like(attention_mask, memory_format=torch.contiguous_format).copy_(attention_mask)

        # Use the memory-efficient backend
        from torch.nn.attention import SDPBackend, sdpa_kernel
        with sdpa_kernel(SDPBackend.EFFICIENT_ATTENTION):
            attn_output = F.scaled_dot_product_attention(q, k, v, attention_mask, dropout_p=0.0)

        attn_output = attn_output.squeeze(0).transpose(0, 1)  # (seq_length, num_heads, head_dim)
        attn_output = attn_output.reshape(seq_length, -1)

        attn_output = self.proj(attn_output)
        return attn_output


DOTS_VISION_ATTENTION_CLASSES = {
    "eager": VisionAttention,
    "eager_v2": VisionAttentionV2,  # lower memory use
    "flash_attention_2": VisionFlashAttention2,
    "sdpa": VisionSdpaAttention,
    "ascend_fa": VisionAscendAttention,  # Ascend NPU; accuracy degrades severely on long sequences
}
| 306 |
+
|
| 307 |
+
class RMSNorm(nn.Module):
|
| 308 |
+
def __init__(self, dim: int, eps: float = 1e-6):
|
| 309 |
+
super().__init__()
|
| 310 |
+
self.weight = nn.Parameter(torch.ones(dim))
|
| 311 |
+
self.eps = eps
|
| 312 |
+
|
| 313 |
+
def forward(self, x: torch.Tensor) -> torch.Tensor:
|
| 314 |
+
output = self._norm(x.float()).type_as(x)
|
| 315 |
+
return output * self.weight
|
| 316 |
+
|
| 317 |
+
def extra_repr(self) -> str:
|
| 318 |
+
return f"{tuple(self.weight.shape)}, eps={self.eps}"
|
| 319 |
+
|
| 320 |
+
def _norm(self, x: torch.Tensor) -> torch.Tensor:
|
| 321 |
+
return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps)
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
class DotsSwiGLUFFN(nn.Module):
|
| 325 |
+
def __init__(self, config):
|
| 326 |
+
super().__init__()
|
| 327 |
+
hidden_features = config.intermediate_size
|
| 328 |
+
in_features = config.embed_dim
|
| 329 |
+
bias = config.use_bias
|
| 330 |
+
|
| 331 |
+
self.fc1 = nn.Linear(in_features, hidden_features, bias=bias)
|
| 332 |
+
self.fc2 = nn.Linear(hidden_features, in_features, bias=bias)
|
| 333 |
+
self.fc3 = nn.Linear(in_features, hidden_features, bias=bias)
|
| 334 |
+
|
| 335 |
+
def forward(self, x: torch.Tensor) -> torch.Tensor:
|
| 336 |
+
x = F.silu(self.fc1(x)) * self.fc3(x)
|
| 337 |
+
x = self.fc2(x)
|
| 338 |
+
return x
|
| 339 |
+
|
| 340 |
+
|
| 341 |
+
class DotsPatchEmbed(nn.Module):
|
| 342 |
+
def __init__(self, config):
|
| 343 |
+
super().__init__()
|
| 344 |
+
self.num_channels = config.num_channels
|
| 345 |
+
self.patch_size = config.patch_size
|
| 346 |
+
self.temporal_patch_size = config.temporal_patch_size
|
| 347 |
+
self.embed_dim = config.embed_dim
|
| 348 |
+
self.config = config
|
| 349 |
+
self.proj = nn.Conv2d(
|
| 350 |
+
config.num_channels,
|
| 351 |
+
config.embed_dim,
|
| 352 |
+
kernel_size=(config.patch_size, config.patch_size),
|
| 353 |
+
stride=(config.patch_size, config.patch_size),
|
| 354 |
+
)
|
| 355 |
+
self.norm = RMSNorm(config.embed_dim, eps=config.rms_norm_eps)
|
| 356 |
+
|
| 357 |
+
def forward(self, x: torch.Tensor, grid_thw=None) -> torch.Tensor:
|
| 358 |
+
x = x.view(-1, self.num_channels, self.temporal_patch_size, self.patch_size, self.patch_size)[:, :, 0]
|
| 359 |
+
x = self.proj(x).view(-1, self.embed_dim)
|
| 360 |
+
x = self.norm(x)
|
| 361 |
+
return x
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
class DotsViTPreprocessor(nn.Module):
|
| 365 |
+
def __init__(self, config):
|
| 366 |
+
super().__init__()
|
| 367 |
+
self.patch_h = config.patch_size
|
| 368 |
+
self.patch_w = config.patch_size
|
| 369 |
+
self.embed_dim = config.embed_dim
|
| 370 |
+
self.config = config
|
| 371 |
+
self.patchifier = DotsPatchEmbed(config)
|
| 372 |
+
|
| 373 |
+
def forward(self, x: torch.Tensor, grid_thw=None) -> torch.Tensor:
|
| 374 |
+
tokens = self.patchifier(x, grid_thw)
|
| 375 |
+
return tokens
|
| 376 |
+
|
| 377 |
+
|
class DotsVisionBlock(nn.Module):
    def __init__(self, config, attn_implementation: str = "flash_attention_2"):
        super().__init__()

        if attn_implementation == "flash_attention_2" and not flash_attn_available:
            attn_implementation = "eager"
            print("flash attention not available! falling back to eager implementation")

        if attn_implementation == "ascend_fa" and not npu_available:
            attn_implementation = "eager"
            print("Ascend NPU flash attention not available! falling back to eager implementation")

        self.attn = DOTS_VISION_ATTENTION_CLASSES[attn_implementation](
            config, config.embed_dim, num_heads=config.num_attention_heads, bias=config.use_bias
        )
        self.norm1 = RMSNorm(config.embed_dim, eps=config.rms_norm_eps)
        self.mlp = DotsSwiGLUFFN(config)
        self.norm2 = RMSNorm(config.embed_dim, eps=config.rms_norm_eps)

    def forward(self, hidden_states, cu_seqlens, rotary_pos_emb) -> torch.Tensor:
        # Pre-norm residual attention followed by a pre-norm residual SwiGLU MLP.
        hidden_states = hidden_states + self.attn(
            self.norm1(hidden_states), cu_seqlens=cu_seqlens, rotary_pos_emb=rotary_pos_emb
        )
        hidden_states = hidden_states + self.mlp(self.norm2(hidden_states))
        return hidden_states

class DotsVisionTransformer(PreTrainedModel):
    def __init__(self, config: DotsVisionConfig) -> None:
        super().__init__(config)
        self.config = config
        self.spatial_merge_size = config.spatial_merge_size

        self.patch_embed = DotsViTPreprocessor(config)
        self._init_weights(self.patch_embed.patchifier.proj)

        head_dim = config.embed_dim // config.num_attention_heads
        self.rotary_pos_emb = VisionRotaryEmbedding(head_dim // 2)

        self.blocks = nn.ModuleList(
            [DotsVisionBlock(config, config.attn_implementation) for _ in range(config.num_hidden_layers)]
        )

        if self.config.post_norm:
            self.post_trunk_norm = RMSNorm(config.embed_dim, eps=config.rms_norm_eps)

        self.merger = PatchMerger(
            dim=config.hidden_size,
            context_dim=config.embed_dim,
            spatial_merge_size=config.spatial_merge_size,
            init_merger_std=self.config.init_merger_std,
        )

        self.gradient_checkpointing = False
        self._gradient_checkpointing_func = torch.utils.checkpoint.checkpoint

    def _init_weights(self, module):
        std = self.config.initializer_range
        # nn.Conv2d added to the check: the patch-embedding projection is a Conv2d,
        # so the original (nn.Linear, nn.Conv3d) test silently skipped it.
        if isinstance(module, (nn.Linear, nn.Conv2d, nn.Conv3d)):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()

    @property
    def dtype(self) -> torch.dtype:
        return self.blocks[0].mlp.fc2.weight.dtype

    @property
    def device(self) -> torch.device:
        return self.blocks[0].mlp.fc2.weight.device

    def get_pos_ids_by_grid(self, grid_thw):
        # Build (h, w) position ids per image, permuted so tokens in the same
        # spatial_merge_size x spatial_merge_size merge window end up adjacent.
        pos_ids = []
        for t, h, w in grid_thw:
            hpos_ids = torch.arange(h).unsqueeze(1).expand(-1, w)
            hpos_ids = hpos_ids.reshape(
                h // self.spatial_merge_size,
                self.spatial_merge_size,
                w // self.spatial_merge_size,
                self.spatial_merge_size,
            )
            hpos_ids = hpos_ids.permute(0, 2, 1, 3)
            hpos_ids = hpos_ids.flatten()

            wpos_ids = torch.arange(w).unsqueeze(0).expand(h, -1)
            wpos_ids = wpos_ids.reshape(
                h // self.spatial_merge_size,
                self.spatial_merge_size,
                w // self.spatial_merge_size,
                self.spatial_merge_size,
            )
            wpos_ids = wpos_ids.permute(0, 2, 1, 3)
            wpos_ids = wpos_ids.flatten()
            pos_ids.append(torch.stack([hpos_ids, wpos_ids], dim=-1).repeat(t, 1))

        return pos_ids

    def rot_pos_emb(self, grid_thw):
        pos_ids = self.get_pos_ids_by_grid(grid_thw)
        pos_ids = torch.cat(pos_ids, dim=0)
        max_grid_size = grid_thw[:, 1:].max()
        rotary_pos_emb_full = self.rotary_pos_emb(max_grid_size)
        rotary_pos_emb = rotary_pos_emb_full[pos_ids].flatten(1)
        return rotary_pos_emb

    def forward(self, hidden_states: torch.Tensor, grid_thw: torch.Tensor, bf16=True) -> torch.Tensor:
        if bf16:
            hidden_states = hidden_states.bfloat16()
        hidden_states = self.patch_embed(hidden_states, grid_thw)

        rotary_pos_emb = self.rot_pos_emb(grid_thw)

        # Cumulative sequence lengths mark image boundaries so attention never crosses images.
        cu_seqlens = torch.repeat_interleave(grid_thw[:, 1] * grid_thw[:, 2], grid_thw[:, 0]).cumsum(
            dim=0,
            dtype=grid_thw.dtype if torch.jit.is_tracing() else torch.int32,
        )
        cu_seqlens = F.pad(cu_seqlens, (1, 0), value=0)

        for blk in self.blocks:
            if self.gradient_checkpointing and self.training:
                hidden_states = self._gradient_checkpointing_func(
                    blk.__call__,
                    hidden_states,
                    cu_seqlens,
                    rotary_pos_emb,
                )
            else:
                hidden_states = blk(hidden_states, cu_seqlens=cu_seqlens, rotary_pos_emb=rotary_pos_emb)

        if self.config.post_norm:
            hidden_states = self.post_trunk_norm(hidden_states)

        hidden_states = self.merger(hidden_states)
        return hidden_states
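To make the sequence bookkeeping in forward() concrete: each image contributes t*h*w patch tokens, cu_seqlens marks the image boundaries, and the PatchMerger then shrinks each spatial_merge_size x spatial_merge_size window into one token. A hedged sketch with illustrative grids:

import torch
import torch.nn.functional as F

grid_thw = torch.tensor([[1, 16, 16], [1, 8, 32]])  # two still images, 256 patches each

seqlens = torch.repeat_interleave(grid_thw[:, 1] * grid_thw[:, 2], grid_thw[:, 0])
cu_seqlens = F.pad(seqlens.cumsum(0, dtype=torch.int32), (1, 0), value=0)
print(cu_seqlens)  # tensor([0, 256, 512], dtype=torch.int32)

merge = 2  # spatial_merge_size, matching merge_size in preprocessor_config.json
merged_tokens = (grid_thw[:, 0] * grid_thw[:, 1] * grid_thw[:, 2] // merge**2).sum()
print(merged_tokens.item())  # 128 merged tokens come out of the PatchMerger, from 512 patch tokens in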
preprocessor_config.json
ADDED
@@ -0,0 +1,22 @@
{
  "auto_map": {
    "AutoProcessor": "configuration_dots.DotsVLProcessor"
  },
  "min_pixels": 3136,
  "max_pixels": 11289600,
  "patch_size": 14,
  "temporal_patch_size": 1,
  "merge_size": 2,
  "image_mean": [
    0.48145466,
    0.4578275,
    0.40821073
  ],
  "image_std": [
    0.26862954,
    0.26130258,
    0.27577711
  ],
  "image_processor_type": "Qwen2VLImageProcessor",
  "processor_class": "DotsVLProcessor"
}
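A back-of-envelope note on the bounds above, assuming the Qwen2VL-style resizing implied by image_processor_type: image sides are aligned to patch_size * merge_size = 28 px and total pixels are clamped to [min_pixels, max_pixels], which caps the vision-token budget per image:

patch_size, merge_size = 14, 2
min_pixels, max_pixels = 3136, 11289600

min_tokens = min_pixels // patch_size**2 // merge_size**2  # 3136 px (56x56) -> 16 patches -> 4 merged tokens
max_tokens = max_pixels // patch_size**2 // merge_size**2  # 11289600 px -> 57600 patches -> 14400 merged tokens
print(min_tokens, max_tokens)  # 4 14400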
special_tokens_map.json
ADDED
@@ -0,0 +1,25 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": "[PAD]"
}
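A hedged sanity check that this map round-trips through the tokenizer (the repo id below is assumed; adjust it to wherever these files are loaded from):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("rednote-hilab/dots.ocr", trust_remote_code=True)  # repo id assumed
print(tok.eos_token, tok.pad_token)                # <|endoftext|> [PAD]
print(tok.convert_tokens_to_ids("<|endoftext|>"))  # 151643 per tokenizer_config.json below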
tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
tokenizer_config.json
ADDED
@@ -0,0 +1,391 @@
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151665": {
      "content": "<|imgpad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151666": {
      "content": "<|img|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151667": {
      "content": "<|endofimg|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151668": {
      "content": "<|systemprompt|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151669": {
      "content": "<|endofsystemprompt|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151670": {
      "content": "<|user|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151671": {
      "content": "<|endofuser|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151672": {
      "content": "<|assistant|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151673": {
      "content": "<|endofassistant|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151674": {
      "content": "<|ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151675": {
      "content": "<|ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151676": {
      "content": "[SEP]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151677": {
      "content": "<|pic|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151678": {
      "content": "<|text|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151679": {
      "content": "<|pictotext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151680": {
      "content": "[PAD]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151681": {
      "content": "<|slice|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151682": {
      "content": "<|endofslice|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151683": {
      "content": "<|imgrowend|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151684": {
      "content": "<|polygon_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151685": {
      "content": "<|polygon_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151686": {
      "content": "<|image_gen_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151687": {
      "content": "<|image_gen_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
  "chat_template": "{%- for m in messages %}\n {%- if m.role == 'system' %}\n {{- '<|system|>' + m.content + '<|endofsystem|>\\n' }}\n {%- elif m.role == 'user' %}\n {{- '<|user|>' + m.content + '<|endofuser|>' }}\n {%- elif m.role == 'assistant' %}\n {{- '<|assistant|>' + m.content }}\n {%- if not loop.last %}\n {{- '<|endofassistant|>' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if messages[-1].role != 'assistant' %}\n {{- '<|assistant|>' }}\n{%- endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|endoftext|>",
  "errors": "replace",
  "model_max_length": 131072,
  "pad_token": "[PAD]",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
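Note that the chat_template above uses the <|user|>/<|assistant|> turn markers rather than the ChatML <|im_start|> style, and appends a bare <|assistant|> when the last message is not an assistant turn. Continuing the tokenizer sketch above, a hedged rendering example:

messages = [{"role": "user", "content": "Extract all text from this page."}]
prompt = tok.apply_chat_template(messages, tokenize=False)
print(prompt)  # <|user|>Extract all text from this page.<|endofuser|><|assistant|>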
vocab.json
ADDED
The diff for this file is too large to render. See raw diff.