Commit ·
ea0b9a5
0
Parent(s):
Squashed to single commit (history removed)
Browse files- .gitattributes +40 -0
- LICENSE +190 -0
- README.md +414 -0
- aggregate_npm.png +3 -0
- benchmarks.png +3 -0
- config.json +32 -0
- emissions.csv +39 -0
- evals.yaml +14 -0
- evals_all_steps.csv +37 -0
- evals_for_reference.csv +25 -0
- generation_config.json +14 -0
- learning_curves.png +3 -0
- logo_lilmoo.png +3 -0
- model.safetensors +3 -0
- performance_vs_compute.png +3 -0
- special_tokens_map.json +74 -0
- tokenizer.json +0 -0
- tokenizer_config.json +436 -0
- train_logs.parquet +3 -0
- training_config.yml +79 -0
- val_logs.parquet +3 -0
.gitattributes
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
| 13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 30 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 31 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 32 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
learning_curves.png filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
logo_lilmoo.png filter=lfs diff=lfs merge=lfs -text
|
| 38 |
+
aggregate_npm.png filter=lfs diff=lfs merge=lfs -text
|
| 39 |
+
benchmarks.png filter=lfs diff=lfs merge=lfs -text
|
| 40 |
+
performance_vs_compute.png filter=lfs diff=lfs merge=lfs -text
|
LICENSE
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Apache License
|
| 2 |
+
Version 2.0, January 2004
|
| 3 |
+
http://www.apache.org/licenses/
|
| 4 |
+
|
| 5 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 6 |
+
|
| 7 |
+
1. Definitions.
|
| 8 |
+
|
| 9 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 10 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 11 |
+
|
| 12 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 13 |
+
the copyright owner that is granting the License.
|
| 14 |
+
|
| 15 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 16 |
+
other entities that control, are controlled by, or are under common
|
| 17 |
+
control with that entity. For the purposes of this definition,
|
| 18 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 19 |
+
direction or management of such entity, whether by contract or
|
| 20 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 21 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 22 |
+
|
| 23 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 24 |
+
exercising permissions granted by this License.
|
| 25 |
+
|
| 26 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 27 |
+
including but not limited to software source code, documentation
|
| 28 |
+
source, and configuration files.
|
| 29 |
+
|
| 30 |
+
"Object" form shall mean any form resulting from mechanical
|
| 31 |
+
transformation or translation of a Source form, including but
|
| 32 |
+
not limited to compiled object code, generated documentation,
|
| 33 |
+
and conversions to other media types.
|
| 34 |
+
|
| 35 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 36 |
+
Object form, made available under the License, as indicated by a
|
| 37 |
+
copyright notice that is included in or attached to the work
|
| 38 |
+
(an example is provided in the Appendix below).
|
| 39 |
+
|
| 40 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 41 |
+
form, that is based on (or derived from) the Work and for which the
|
| 42 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 43 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 44 |
+
of this License, Derivative Works shall not include works that remain
|
| 45 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 46 |
+
the Work and Derivative Works thereof.
|
| 47 |
+
|
| 48 |
+
"Contribution" shall mean any work of authorship, including
|
| 49 |
+
the original version of the Work and any modifications or additions
|
| 50 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 51 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 52 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 53 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 54 |
+
means any form of electronic, verbal, or written communication sent
|
| 55 |
+
to the Licensor or its representatives, including but not limited to
|
| 56 |
+
communication on electronic mailing lists, source code control systems,
|
| 57 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 58 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 59 |
+
excluding communication that is conspicuously marked or otherwise
|
| 60 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 61 |
+
|
| 62 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 63 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 64 |
+
subsequently incorporated within the Work.
|
| 65 |
+
|
| 66 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 67 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 68 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 69 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 70 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 71 |
+
Work and such Derivative Works in Source or Object form.
|
| 72 |
+
|
| 73 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 74 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 75 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 76 |
+
(except as stated in this section) patent license to make, have made,
|
| 77 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 78 |
+
where such license applies only to those patent claims licensable
|
| 79 |
+
by such Contributor that are necessarily infringed by their
|
| 80 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 81 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 82 |
+
institute patent litigation against any entity (including a
|
| 83 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 84 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 85 |
+
or contributory patent infringement, then any patent licenses
|
| 86 |
+
granted to You under this License for that Work shall terminate
|
| 87 |
+
as of the date such litigation is filed.
|
| 88 |
+
|
| 89 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 90 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 91 |
+
modifications, and in Source or Object form, provided that You
|
| 92 |
+
meet the following conditions:
|
| 93 |
+
|
| 94 |
+
(a) You must give any other recipients of the Work or
|
| 95 |
+
Derivative Works a copy of this License; and
|
| 96 |
+
|
| 97 |
+
(b) You must cause any modified files to carry prominent notices
|
| 98 |
+
stating that You changed the files; and
|
| 99 |
+
|
| 100 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 101 |
+
that You distribute, all copyright, patent, trademark, and
|
| 102 |
+
attribution notices from the Source form of the Work,
|
| 103 |
+
excluding those notices that do not pertain to any part of
|
| 104 |
+
the Derivative Works; and
|
| 105 |
+
|
| 106 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 107 |
+
distribution, then any Derivative Works that You distribute must
|
| 108 |
+
include a readable copy of the attribution notices contained
|
| 109 |
+
within such NOTICE file, excluding those notices that do not
|
| 110 |
+
pertain to any part of the Derivative Works, in at least one
|
| 111 |
+
of the following places: within a NOTICE text file distributed
|
| 112 |
+
as part of the Derivative Works; within the Source form or
|
| 113 |
+
documentation, if provided along with the Derivative Works; or,
|
| 114 |
+
within a display generated by the Derivative Works, if and
|
| 115 |
+
wherever such third-party notices normally appear. The contents
|
| 116 |
+
of the NOTICE file are for informational purposes only and
|
| 117 |
+
do not modify the License. You may add Your own attribution
|
| 118 |
+
notices within Derivative Works that You distribute, alongside
|
| 119 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 120 |
+
that such additional attribution notices cannot be construed
|
| 121 |
+
as modifying the License.
|
| 122 |
+
|
| 123 |
+
You may add Your own copyright statement to Your modifications and
|
| 124 |
+
may provide additional or different license terms and conditions
|
| 125 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 126 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 127 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 128 |
+
the conditions stated in this License.
|
| 129 |
+
|
| 130 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 131 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 132 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 133 |
+
this License, without any additional terms or conditions.
|
| 134 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 135 |
+
the terms of any separate license agreement you may have executed
|
| 136 |
+
with Licensor regarding such Contributions.
|
| 137 |
+
|
| 138 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 139 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 140 |
+
except as required for reasonable and customary use in describing the
|
| 141 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 142 |
+
|
| 143 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 144 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 145 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 146 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 147 |
+
implied, including, without limitation, any warranties or conditions
|
| 148 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 149 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 150 |
+
appropriateness of using or redistributing the Work and assume any
|
| 151 |
+
risks associated with Your exercise of permissions under this License.
|
| 152 |
+
|
| 153 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 154 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 155 |
+
unless required by applicable law (such as deliberate and grossly
|
| 156 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 157 |
+
liable to You for damages, including any direct, indirect, special,
|
| 158 |
+
incidental, or consequential damages of any character arising as a
|
| 159 |
+
result of this License or out of the use or inability to use the
|
| 160 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 161 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 162 |
+
other commercial damages or losses), even if such Contributor
|
| 163 |
+
has been advised of the possibility of such damages.
|
| 164 |
+
|
| 165 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 166 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 167 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 168 |
+
or other liability obligations and/or rights consistent with this
|
| 169 |
+
License. However, in accepting such obligations, You may act only
|
| 170 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 171 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 172 |
+
defend, and hold each Contributor harmless for any liability
|
| 173 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 174 |
+
of your accepting any such warranty or additional liability.
|
| 175 |
+
|
| 176 |
+
END OF TERMS AND CONDITIONS
|
| 177 |
+
|
| 178 |
+
Copyright Nicholas Kluge Corrêa, Shiza Fatimah, Aniket Sen, and Sophia Falk
|
| 179 |
+
|
| 180 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 181 |
+
you may not use this file except in compliance with the License.
|
| 182 |
+
You may obtain a copy of the License at
|
| 183 |
+
|
| 184 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 185 |
+
|
| 186 |
+
Unless required by applicable law or agreed to in writing, software
|
| 187 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 188 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 189 |
+
See the License for the specific language governing permissions and
|
| 190 |
+
limitations under the License.
|
README.md
ADDED
|
@@ -0,0 +1,414 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
language:
|
| 3 |
+
- hi
|
| 4 |
+
license: apache-2.0
|
| 5 |
+
library_name: transformers
|
| 6 |
+
tags:
|
| 7 |
+
- text-generation-inference
|
| 8 |
+
datasets:
|
| 9 |
+
- Polygl0t/GigaLekh
|
| 10 |
+
metrics:
|
| 11 |
+
- perplexity
|
| 12 |
+
pipeline_tag: text-generation
|
| 13 |
+
widget:
|
| 14 |
+
- text: भारत की राजधानी क्या है?
|
| 15 |
+
example_title: उदाहरण
|
| 16 |
+
- text: भारत का राष्ट्रीय पक्षी कौन सा है?
|
| 17 |
+
example_title: उदाहरण
|
| 18 |
+
inference:
|
| 19 |
+
parameters:
|
| 20 |
+
repetition_penalty: 1.2
|
| 21 |
+
temperature: 0.1
|
| 22 |
+
top_k: 50
|
| 23 |
+
top_p: 1
|
| 24 |
+
max_new_tokens: 150
|
| 25 |
+
co2_eq_emissions:
|
| 26 |
+
emissions: 538740
|
| 27 |
+
source: CodeCarbon
|
| 28 |
+
training_type: pre-training
|
| 29 |
+
geographical_location: Germany
|
| 30 |
+
hardware_used: NVIDIA A100-SXM4-80GB
|
| 31 |
+
model-index:
|
| 32 |
+
- name: LilMoo-v0.1
|
| 33 |
+
results:
|
| 34 |
+
- task:
|
| 35 |
+
type: text-generation
|
| 36 |
+
name: Text Generation
|
| 37 |
+
dataset:
|
| 38 |
+
name: CSQA-HI
|
| 39 |
+
type: ai4bharat/indic_glue
|
| 40 |
+
split: test
|
| 41 |
+
args:
|
| 42 |
+
num_few_shot: 5
|
| 43 |
+
metrics:
|
| 44 |
+
- type: acc_norm
|
| 45 |
+
value: 37.12
|
| 46 |
+
name: normalized accuracy
|
| 47 |
+
source:
|
| 48 |
+
url: https://github.com/Polygl0t/lm-evaluation-harness
|
| 49 |
+
name: Language Model Evaluation Harness (Polyglot)
|
| 50 |
+
- task:
|
| 51 |
+
type: text-generation
|
| 52 |
+
name: Text Generation
|
| 53 |
+
dataset:
|
| 54 |
+
name: MILU-HI
|
| 55 |
+
type: ai4bharat/MILU
|
| 56 |
+
split: test
|
| 57 |
+
args:
|
| 58 |
+
num_few_shot: 5
|
| 59 |
+
metrics:
|
| 60 |
+
- type: acc_norm
|
| 61 |
+
value: 28.55
|
| 62 |
+
name: normalized accuracy
|
| 63 |
+
source:
|
| 64 |
+
url: https://github.com/AI4Bharat/MILU
|
| 65 |
+
name: MILU -A Multi-task Indic Language Understanding Benchmark
|
| 66 |
+
- task:
|
| 67 |
+
type: text-generation
|
| 68 |
+
name: Text Generation
|
| 69 |
+
dataset:
|
| 70 |
+
name: ARC (HI)
|
| 71 |
+
type: arc_hi
|
| 72 |
+
args:
|
| 73 |
+
num_few_shot: 5
|
| 74 |
+
metrics:
|
| 75 |
+
- type: acc_norm
|
| 76 |
+
value: 30.05
|
| 77 |
+
name: normalized accuracy
|
| 78 |
+
source:
|
| 79 |
+
url: https://github.com/Polygl0t/lm-evaluation-harness/tree/arc_poly
|
| 80 |
+
name: Language Model Evaluation Harness (Multilingual)
|
| 81 |
+
- task:
|
| 82 |
+
type: text-generation
|
| 83 |
+
name: Text Generation
|
| 84 |
+
dataset:
|
| 85 |
+
name: HellaSwag (HI)
|
| 86 |
+
type: hellaswag_hi
|
| 87 |
+
args:
|
| 88 |
+
num_few_shot: 0
|
| 89 |
+
metrics:
|
| 90 |
+
- type: acc_norm
|
| 91 |
+
value: 37.06
|
| 92 |
+
name: normalized accuracy
|
| 93 |
+
source:
|
| 94 |
+
url: https://github.com/Polygl0t/lm-evaluation-harness/tree/hellaswag_poly
|
| 95 |
+
name: Language Model Evaluation Harness (Multilingual)
|
| 96 |
+
- task:
|
| 97 |
+
type: text-generation
|
| 98 |
+
name: Text Generation
|
| 99 |
+
dataset:
|
| 100 |
+
name: MMLU (HI)
|
| 101 |
+
type: mmlu_hi
|
| 102 |
+
args:
|
| 103 |
+
num_few_shot: 5
|
| 104 |
+
metrics:
|
| 105 |
+
- type: acc_norm
|
| 106 |
+
value: 26.19
|
| 107 |
+
name: normalized accuracy
|
| 108 |
+
source:
|
| 109 |
+
url: https://github.com/Polygl0t/lm-evaluation-harness/tree/mmlu_poly
|
| 110 |
+
name: Language Model Evaluation Harness (Multilingual)
|
| 111 |
+
- task:
|
| 112 |
+
type: text-generation
|
| 113 |
+
name: Text Generation
|
| 114 |
+
dataset:
|
| 115 |
+
name: Global PIQA
|
| 116 |
+
type: global_piqa_hi
|
| 117 |
+
args:
|
| 118 |
+
num_few_shot: 5
|
| 119 |
+
metrics:
|
| 120 |
+
- type: acc_norm
|
| 121 |
+
value: 0.65
|
| 122 |
+
name: normalized accuracy
|
| 123 |
+
source:
|
| 124 |
+
url: https://github.com/EleutherAI/lm-evaluation-harness
|
| 125 |
+
name: Language Model Evaluation Harness
|
| 126 |
+
---
|
| 127 |
+
# LilMoo-v0.1
|
| 128 |
+
|
| 129 |
+
<img src="./logo_lilmoo.png" alt="A round logo of a cartoon cow in a hoodie, cap, and gold chain with the text 'LilMoo' below." height="80" style="display: block; margin: 0 auto;">
|
| 130 |
+
|
| 131 |
+
## Model Summary
|
| 132 |
+
|
| 133 |
+
**[LilMoo-v0.1](https://huggingface.co/Polygl0t/LilMoo-v0.1)** is a decoder-transformer natively pretrained in Hindi. LilMoo is part of the [Polygl0t](https://huggingface.co/Polygl0t) initiative to advance language models for low-resource languages.
|
| 134 |
+
|
| 135 |
+
## Details
|
| 136 |
+
|
| 137 |
+
- **Architecture:** a Transformer-based model pre-trained via causal language modeling
|
| 138 |
+
- **Size:** 670,127,616 parameters
|
| 139 |
+
- **Context length:** 4096 tokens
|
| 140 |
+
- **Dataset(s):** [Polygl0t/GigaLekh](https://huggingface.co/datasets/Polygl0t/GigaLekh)
|
| 141 |
+
- **Language:** Hindi
|
| 142 |
+
- **Batch size:** 2,097,152 tokens
|
| 143 |
+
- **Number of steps:** 179,590
|
| 144 |
+
- **GPU:** 8 NVIDIA A100-SXM4-80GB
|
| 145 |
+
- **Training time**: ~ 347 hours
|
| 146 |
+
- **Emissions:** 538 KgCO2 (Germany)
|
| 147 |
+
- **Total energy consumption:** 1414 kWh
|
| 148 |
+
|
| 149 |
+
This repository has the [source code](https://github.com/Polygl0t/Polygl0t) used to train this model. The full configuration used for training is available in this [configuration file](training_config.yml).
|
| 150 |
+
|
| 151 |
+
### Learning Rate
|
| 152 |
+
|
| 153 |
+
This model was trained with a cosine learning rate schedule with a linear warmup over the first 1,000 steps to a peak learning rate of 7e-4, followed by a cosine decay to 7e-5. The minimum learning rate was reached and held constant for the last 10% of training. Checkpoints were saved every 5,000 steps, which equates to approximately 10 billion tokens.
|
| 154 |
+
|
| 155 |
+
The `main` branch of this repository contains the final checkpoint saved at step 179,590. All other checkpoints are available as separate branches. To load a specific checkpoint, you can use the following code snippet:
|
| 156 |
+
|
| 157 |
+
```python
|
| 158 |
+
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 159 |
+
|
| 160 |
+
model_id = "Polygl0t/LilMoo-v0.1"
|
| 161 |
+
revision = "step_5000" # Change this to the desired checkpoint branch
|
| 162 |
+
tokenizer = AutoTokenizer.from_pretrained(model_id)
|
| 163 |
+
model = AutoModelForCausalLM.from_pretrained(model_id, revision=revision)
|
| 164 |
+
```
|
| 165 |
+
|
| 166 |
+
<details>
|
| 167 |
+
<summary><b>Learning Curves</b></summary>
|
| 168 |
+
|
| 169 |
+

|
| 170 |
+
|
| 171 |
+
This plot illustrates the evolution of model performance (measured by loss and perplexity) as a function of training time, measured in tokens seen during training.
|
| 172 |
+
|
| 173 |
+
</details>
|
| 174 |
+
|
| 175 |
+
## Intended Uses
|
| 176 |
+
|
| 177 |
+
The primary intended use of LilMoo is to serve as a foundation for research and development involving native Hindi language modeling. Checkpoints saved during training are designed to provide a controlled setting for performing comparative experiments, specifically regarding the effects of active pretraining on the performance of currently available benchmarks. You may also fine-tune and adapt LilMoo for deployment if your use follows the Apache 2.0 license. If you decide to use LilMoo as a basis for your fine-tuned model, please conduct your own risk and bias assessment.
|
| 178 |
+
|
| 179 |
+
## Out-of-scope Use
|
| 180 |
+
|
| 181 |
+
- LilMoo is **not intended for deployment**. It is not an out-of-the-box product and should not be used for human-facing interactions.
|
| 182 |
+
|
| 183 |
+
- LilMoo is for **the Hindi language only** and is unsuitable for text generation tasks in other languages.
|
| 184 |
+
|
| 185 |
+
- LilMoo has **not been fine-tuned** for downstream tasks.
|
| 186 |
+
|
| 187 |
+
## Basic usage
|
| 188 |
+
|
| 189 |
+
### Via HuggingFace Transformers
|
| 190 |
+
|
| 191 |
+
Using the `pipeline`:
|
| 192 |
+
|
| 193 |
+
```python
|
| 194 |
+
from transformers import pipeline
|
| 195 |
+
|
| 196 |
+
generator = pipeline("text-generation", model="Polygl0t/LilMoo-v0.1")
|
| 197 |
+
|
| 198 |
+
completions = generator("भारत की राजधानी क्या है?", num_return_sequences=2, max_new_tokens=150)
|
| 199 |
+
|
| 200 |
+
for comp in completions:
|
| 201 |
+
print(f"🤖 {comp['generated_text']}")
|
| 202 |
+
```
|
| 203 |
+
|
| 204 |
+
Using the `AutoTokenizer` and `AutoModelForCausalLM`:
|
| 205 |
+
|
| 206 |
+
```python
|
| 207 |
+
from transformers import GenerationConfig, TextGenerationPipeline, AutoTokenizer, AutoModelForCausalLM
|
| 208 |
+
import torch
|
| 209 |
+
|
| 210 |
+
# Specify the model and tokenizer
|
| 211 |
+
model_id = "Polygl0t/LilMoo-v0.1"
|
| 212 |
+
tokenizer = AutoTokenizer.from_pretrained(model_id)
|
| 213 |
+
model = AutoModelForCausalLM.from_pretrained(model_id)
|
| 214 |
+
|
| 215 |
+
# Specify the generation parameters as you like
|
| 216 |
+
generation_config = GenerationConfig(
|
| 217 |
+
**{
|
| 218 |
+
"do_sample": True,
|
| 219 |
+
"max_new_tokens": 150,
|
| 220 |
+
"renormalize_logits": True,
|
| 221 |
+
"repetition_penalty": 1.2,
|
| 222 |
+
"temperature": 0.1,
|
| 223 |
+
"top_k": 50,
|
| 224 |
+
"top_p": 1.0,
|
| 225 |
+
"use_cache": True,
|
| 226 |
+
}
|
| 227 |
+
)
|
| 228 |
+
|
| 229 |
+
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
| 230 |
+
generator = TextGenerationPipeline(model=model, task="text-generation", tokenizer=tokenizer, device=device)
|
| 231 |
+
|
| 232 |
+
# Generate text
|
| 233 |
+
prompt = "भारत की राजधानी क्या है?"
|
| 234 |
+
completion = generator(prompt, generation_config=generation_config)
|
| 235 |
+
print(completion[0]['generated_text'])
|
| 236 |
+
```
|
| 237 |
+
|
| 238 |
+
### Via vLLM Inference
|
| 239 |
+
|
| 240 |
+
```python
|
| 241 |
+
from vllm import LLM, SamplingParams
|
| 242 |
+
from transformers import AutoTokenizer
|
| 243 |
+
import torch
|
| 244 |
+
|
| 245 |
+
model_id = "Polygl0t/LilMoo-v0.1"
|
| 246 |
+
prompt = "भारत की राजधानी क्या है?"
|
| 247 |
+
|
| 248 |
+
tokenizer = AutoTokenizer.from_pretrained(
|
| 249 |
+
model_id,
|
| 250 |
+
use_fast=True,
|
| 251 |
+
)
|
| 252 |
+
|
| 253 |
+
model = LLM(
|
| 254 |
+
model=model_id,
|
| 255 |
+
)
|
| 256 |
+
|
| 257 |
+
sampling_params = SamplingParams(
|
| 258 |
+
max_tokens=150,
|
| 259 |
+
stop=[tokenizer.eos_token],
|
| 260 |
+
stop_token_ids=[tokenizer.eos_token_id],
|
| 261 |
+
n=2,
|
| 262 |
+
temperature=0.1,
|
| 263 |
+
repetition_penalty=1.2,
|
| 264 |
+
top_k=50,
|
| 265 |
+
top_p=1.0
|
| 266 |
+
)
|
| 267 |
+
|
| 268 |
+
outputs = model.generate([prompt], sampling_params)
|
| 269 |
+
|
| 270 |
+
for output in outputs:
|
| 271 |
+
print(f"🤖 {output.outputs[0].text}")
|
| 272 |
+
```
|
| 273 |
+
|
| 274 |
+
## Limitations
|
| 275 |
+
|
| 276 |
+
Like almost all other language models trained on large text datasets scraped from the web, LilMoo exhibits behavior that does not make it an out-of-the-box solution for many real-world applications, especially those requiring factual, reliable, and nontoxic text generation. LilMoo is subject to the following:
|
| 277 |
+
|
| 278 |
+
- **Hallucinations:** LilMoo can produce content that can be mistaken for true facts but is misleading or entirely false, i.e., hallucinations.
|
| 279 |
+
|
| 280 |
+
- **Biases and Toxicity:** LilMoo inherits the social and historical stereotypes from the data used to train it. Given these biases, the model can produce toxic content, i.e., harmful, offensive, or detrimental to individuals, groups, or communities.
|
| 281 |
+
|
| 282 |
+
- **Language Limitations:** LilMoo is primarily designed to interact with Hindi. Other languages might challenge its comprehension, leading to potential misinterpretations or errors in response.
|
| 283 |
+
|
| 284 |
+
- **Repetition and Verbosity:** LilMoo may get stuck in repetition loops (especially if the repetition penalty during generation is set to a low value) or produce verbose responses unrelated to the prompt it was given.
|
| 285 |
+
|
| 286 |
+
Hence, even though LilMoo is released with a permissive license, we urge users to perform their own risk analysis of the model if they intend to use it for real-world applications.
|
| 287 |
+
|
| 288 |
+
## Evaluations
|
| 289 |
+
|
| 290 |
+
The table below compares our two versions of LilMoo against other base models of similar size. The NPM (Normalized Performance Metric) is a metric designed to provide a balanced view of model performance across various tasks, accounting for the inherent difficulty of each task. It normalizes the performance of each model on a given task by comparing it to a baseline performance, which represents the performance of a random model.
|
| 291 |
+
|
| 292 |
+
| | NPM (mean) | ARC | HellaSwag | MMLU | CSQA | MILU | Global PIQA |
|
| 293 |
+
|-----------------|------------|-------|-----------|----------|----------|--------- |--------------|
|
| 294 |
+
| LilMoo-v0.1 | 8.74 | 30.05 | 37.06 | 26.19 | 37.12 | 28.55 | 0.65 |
|
| 295 |
+
| Qwen3-0.6B | 2.2 | 26.02 | 28.90 | 29.65 | 29.95 | 28.86 | 0.54 |
|
| 296 |
+
| Qwen2.5-0.5B | 2.15 | 23.71 | 28.63 | 27.78 | 30.01 | 28.48 | 0.57 |
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
<details>
|
| 300 |
+
<summary><b>Benchmark Scores Across Runs</b></summary>
|
| 301 |
+
|
| 302 |
+

|
| 303 |
+
|
| 304 |
+
This plot illustrates the evolution of model performance across different benchmarks as a function of training time, measured in tokens seen during training. LilMoo models are compared against two baseline models: Qwen2.5-0.5B and Qwen3-0.6B, which are state-of-the-art multilingual models.
|
| 305 |
+
|
| 306 |
+
</details>
|
| 307 |
+
|
| 308 |
+
<details>
|
| 309 |
+
<summary><b>Aggregate NPM Across Benchmarks</b></summary>
|
| 310 |
+
|
| 311 |
+

|
| 312 |
+
|
| 313 |
+
This plot illustrates the evolution of model performance (measured by NPM mean) as a function of training time, measured in tokens seen during training. NPM scores are compared against two baseline models: Qwen2.5-0.5B and Qwen3-0.6B, which are state-of-the-art multilingual models. The sp value (Spearman correlation) between token ingestion and NPM mean is displayed, serving as an indicator of the monotonic relationship between tokens seen and performance.
|
| 314 |
+
|
| 315 |
+
</details>
|
| 316 |
+
|
| 317 |
+
<details>
|
| 318 |
+
<summary><b>Performance vs Compute</b></summary>
|
| 319 |
+
|
| 320 |
+

|
| 321 |
+
|
| 322 |
+
This plot compares the compute requirements (measured as C = 6 * N * D, where N is the number of parameters and D is the number of tokens processed) against the performance of each model (measured by NPM mean). It highlights the trade-offs between model size, training data, and performance. LilMoo models are compared against two baseline models: Qwen2.5-0.5B and Qwen3-0.6B, which are state-of-the-art multilingual models.
|
| 323 |
+
|
| 324 |
+
</details>
|
| 325 |
+
|
| 326 |
+
<details>
|
| 327 |
+
<summary><b>Text Generation Examples</b></summary>
|
| 328 |
+
|
| 329 |
+
All models were prompted with the same Hindi prompts, and the generations were produced with the default parameters specified in the `generation_config.json` of each model. All models were set to generate up to 250 new tokens. LilMoo models generally produce more text given their custom tokenizer and vocabulary, which is more adapted to Hindi.
|
| 330 |
+
|
| 331 |
+
<table border="1" cellpadding="5" cellspacing="0">
|
| 332 |
+
<thead>
|
| 333 |
+
<tr>
|
| 334 |
+
<th>Model</th>
|
| 335 |
+
<th>Prompt</th>
|
| 336 |
+
<th>Generated Completion</th>
|
| 337 |
+
</tr>
|
| 338 |
+
</thead>
|
| 339 |
+
<tbody>
|
| 340 |
+
<tr>
|
| 341 |
+
<td>Qwen/Qwen2.5-0.5B</td>
|
| 342 |
+
<td>
|
| 343 |
+
<span style="color: red;">भारत की राजधानी क्या है?</span>
|
| 344 |
+
</td>
|
| 345 |
+
<td>
|
| 346 |
+
<i> भारत की राजधानी क्या है? | चिन्होराना\nभारत की राजधानी क्या है?\nभारत की राजधानी क्या है?\nभारत की राजधानी क्या है? एक राजधानी को बीच करने से चमखान लेने की तुलना में अब अधिक जानकारी हैं। आज हम एक बार जानते हैं कि राजधानी इतिहास में जब राजनीति की संस्था पर खेत थी तो एक बच्चा ने भारत के विभिन्न क्षेत .......</i>
|
| 347 |
+
</td>
|
| 348 |
+
</tr>
|
| 349 |
+
<tr>
|
| 350 |
+
<td></td>
|
| 351 |
+
<td>
|
| 352 |
+
<span style="color: red;">भारत का राष्ट्रीय पक्षी क्या है?</span>
|
| 353 |
+
</td>
|
| 354 |
+
<td>
|
| 355 |
+
<i>भारत का राष्ट्रीय पक्षी क्या है? विशेष राष्ट्रीय पक्षी नवीन पक्षी स्वयंच कर्मचारी राष्ट्रीय राज्य अमेरिकी राष्ट्रीय द्वितीय भारतीय संघ और अमेरिकी राष्ट्रीय यूरोपीय चित्र्या विकास विवरण करने के लिए राष्ट्रीय पक्षी के रूप में संघीय अनुभव है। पक्षी के रूप में विशेष राष्ट्रीय पक्षी नवीन पक्षी स्वयंच कर्मचार .... </i>
|
| 356 |
+
</td>
|
| 357 |
+
</tr>
|
| 358 |
+
<tr>
|
| 359 |
+
<td>Qwen/Qwen3-0.6B</td>
|
| 360 |
+
<td>
|
| 361 |
+
<span style="color: red;">भारत की राजधानी क्या है?</span>
|
| 362 |
+
</td>
|
| 363 |
+
<td>
|
| 364 |
+
<i>भारत की राजधानी क्या है? जारी रूप से जानने के लिए उपयोग करें राजमार्त बाजार बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के बाजार के ...</i>
|
| 365 |
+
</td>
|
| 366 |
+
</tr>
|
| 367 |
+
<tr>
|
| 368 |
+
<td></td>
|
| 369 |
+
<td>
|
| 370 |
+
<span style="color: red;">भारत का राष्ट्रीय पक्षी क्या है?</span>
|
| 371 |
+
</td>
|
| 372 |
+
<td>
|
| 373 |
+
<i>भारत का राष्ट्रीय पक्षी क्या है? जारी अवधारणा अंतर्गत बर्बल प्रतिबेद्य के निर्माण निर्माण नामक चित्र जातक एक साथ करें। आप के पास एक बहुत लोटा गया ज्ञान के बारे अपनी जातक रेल एक साथ जातक रेल के निर्माण के देखा जाएं। एक कार बर्बल प्रतिबेद्य के निर्माण अपनी जातक रेल के निर्माण के निर्माण नामक चित्र जातक के रेल ......</i>
|
| 374 |
+
</td>
|
| 375 |
+
</tr>
|
| 376 |
+
<tr>
|
| 377 |
+
<td>Polygl0t/LilMoo-v0.1</td>
|
| 378 |
+
<td>
|
| 379 |
+
<span style="color: red;">भारत की राजधानी क्या है?</span>
|
| 380 |
+
</td>
|
| 381 |
+
<td>
|
| 382 |
+
<i>'भारत की राजधानी क्या है?\nउत्तर – नई दिल्ली।' </i>
|
| 383 |
+
</td>
|
| 384 |
+
</tr>
|
| 385 |
+
<tr>
|
| 386 |
+
<td></td>
|
| 387 |
+
<td>
|
| 388 |
+
<span style="color: red;">भारत का राष्ट्रीय पक्षी क्या है?</span>
|
| 389 |
+
</td>
|
| 390 |
+
<td>
|
| 391 |
+
<i>भारत का राष्ट्रीय पक्षी कौन सा है?\nउत्तर – मोर! ...</i>
|
| 392 |
+
</td>
|
| 393 |
+
</tr>
|
| 394 |
+
|
| 395 |
+
</tbody>
|
| 396 |
+
</table>
|
| 397 |
+
|
| 398 |
+
</details>
|
| 399 |
+
|
| 400 |
+
## Cite as 🤗
|
| 401 |
+
|
| 402 |
+
```latex
|
| 403 |
+
|
| 404 |
+
```
|
| 405 |
+
|
| 406 |
+
## Acknowledgments
|
| 407 |
+
|
| 408 |
+
Polyglot is a project funded by the Federal Ministry of Education and Research (BMBF) and the Ministry of Culture and Science of the State of North Rhine-Westphalia (MWK) as part of TRA Sustainable Futures (University of Bonn) and the Excellence Strategy of the federal and state governments.
|
| 409 |
+
|
| 410 |
+
We also gratefully acknowledge the granted access to the [Marvin cluster](https://www.hpc.uni-bonn.de/en/systems/marvin) hosted by [University of Bonn](https://www.uni-bonn.de/en) along with the support provided by its High Performance Computing & Analytics Lab.
|
| 411 |
+
|
| 412 |
+
## License
|
| 413 |
+
|
| 414 |
+
LilMoo is licensed under the Apache License, Version 2.0. For more details, see the [LICENSE](LICENSE) file.
|
aggregate_npm.png
ADDED
|
Git LFS Details
|
benchmarks.png
ADDED
|
Git LFS Details
|
config.json
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architectures": [
|
| 3 |
+
"LlamaForCausalLM"
|
| 4 |
+
],
|
| 5 |
+
"attention_bias": false,
|
| 6 |
+
"attention_dropout": 0.0,
|
| 7 |
+
"bos_token_id": 1,
|
| 8 |
+
"eos_token_id": 2,
|
| 9 |
+
"head_dim": 96,
|
| 10 |
+
"hidden_act": "silu",
|
| 11 |
+
"hidden_size": 1536,
|
| 12 |
+
"initializer_range": 0.02,
|
| 13 |
+
"intermediate_size": 3072,
|
| 14 |
+
"is_llama_config": true,
|
| 15 |
+
"max_position_embeddings": 4096,
|
| 16 |
+
"mlp_bias": false,
|
| 17 |
+
"model_type": "llama",
|
| 18 |
+
"num_attention_heads": 16,
|
| 19 |
+
"num_hidden_layers": 28,
|
| 20 |
+
"num_key_value_heads": 8,
|
| 21 |
+
"pad_token_id": 49109,
|
| 22 |
+
"pretraining_tp": 1,
|
| 23 |
+
"rms_norm_eps": 1e-06,
|
| 24 |
+
"rope_interleaved": false,
|
| 25 |
+
"rope_scaling": null,
|
| 26 |
+
"rope_theta": 50000.0,
|
| 27 |
+
"tie_word_embeddings": true,
|
| 28 |
+
"torch_dtype": "bfloat16",
|
| 29 |
+
"transformers_version": "4.53.2",
|
| 30 |
+
"use_cache": false,
|
| 31 |
+
"vocab_size": 49152
|
| 32 |
+
}
|
emissions.csv
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
timestamp,project_name,run_id,experiment_id,duration,emissions,emissions_rate,cpu_power,gpu_power,ram_power,cpu_energy,gpu_energy,ram_energy,energy_consumed,country_name,country_iso_code,region,cloud_provider,cloud_region,os,python_version,codecarbon_version,cpu_count,cpu_model,gpu_count,gpu_model,longitude,latitude,ram_total_size,tracking_mode,on_cloud,pue
|
| 2 |
+
2025-08-22T13:09:08,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,6990.111189811025,1.5052701870713112,0.0002153428101781,45.018562932,1249.5772916335343,70.0,0.0844276872517252,3.7356813407648986,0.1312499090389206,3.9513589370555486,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 3 |
+
2025-08-22T20:52:41,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,34803.51467724564,7.504215449886157,0.0002156165984808,45.0148114575,371.5310144892569,70.0,0.4203349795027105,18.6249090513042,0.6534446419484309,19.698688672755367,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 4 |
+
2025-08-23T06:31:33,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,69535.7843650789,15.005973557615294,0.0002158021757377,45.024083780625006,760.375665203829,70.0,0.8398083643033577,37.24557023587684,1.3055481818522168,39.39092678203253,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 5 |
+
2025-08-23T16:11:06,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,104308.25130017567,22.51623154961074,0.0002158624199806,45.019958895,1102.5556776692188,70.0,1.2597765779126908,55.88727659589662,1.9584240531515071,59.10547722696086,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 6 |
+
2025-08-24T01:50:06,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,139048.77266238397,30.0135568019265,0.0002158491314037,45.029613195,954.1780452874244,70.0,1.679362081217474,74.49601170148392,2.6107052353496463,78.78607901805093,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 7 |
+
2025-08-24T11:29:07,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,173789.32220524596,37.50848460614443,0.0002158273254662,45.01823380875,894.3834534457575,70.0,2.098949810913616,93.09844803980992,3.2629896178272726,98.4603874685508,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 8 |
+
2025-08-24T21:08:09,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,208531.461313332,45.012590208204166,0.0002158551516625,45.0174071025,1078.0144859263723,70.0,2.518549033387952,111.72494604238533,3.91529271318688,118.1587877889596,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 9 |
+
2025-08-25T06:47:23,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,243285.7368701948,52.51804437665513,0.0002158698041746,45.03291550714287,791.9757056344312,70.0,2.938295335792088,130.354609004715,4.567823777763689,137.86072811827046,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 10 |
+
2025-08-25T16:26:02,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,278004.1822971888,60.01302012057256,0.0002158709254827,45.008890560000005,363.5950669080862,70.0,3.3577911854199294,148.95740606082947,5.219965165018503,157.53516241126803,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 11 |
+
2025-08-26T02:04:38,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,312720.730738448,67.51256539187312,0.0002158877194756,45.01342642500001,377.4195442510557,70.0,3.7770893272893127,167.5727033447221,5.871799116603662,177.22159178861563,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 12 |
+
2025-08-26T11:44:03,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,347485.16503014974,75.02183550575685,0.0002158993909833,45.01871802000001,1249.661778163958,70.0,4.196966334335237,186.2120507272992,6.524531975657529,196.93354903729323,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 13 |
+
2025-08-26T21:23:36,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,382258.74185766,82.5257425757781,0.0002158897457118,45.01355589,370.029532923815,70.0,4.6169463020467765,204.8370622328521,7.17741967294312,216.6314282078438,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 14 |
+
2025-08-27T07:02:42,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,417004.18784887344,90.02969638102357,0.0002158963842676,45.0133679025,374.8489540815549,70.0,5.036569447179383,223.46310225283875,7.829758359105582,236.32943005912475,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 15 |
+
2025-08-27T16:42:17,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,451779.798253472,97.54031891043574,0.0002159023473106,45.0249337035,967.821049274507,70.0,5.456580574970412,242.10565658603704,8.482700259744044,256.0449374207527,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 16 |
+
2025-08-28T02:21:57,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,486559.4128692085,105.05314472874696,0.0002159102094218,45.016254513,367.1438420621576,70.0,5.876633331847595,260.75388813044367,9.135706989596155,275.7662284518886,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 17 |
+
2025-08-28T12:01:04,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,521306.26996725146,112.55786025227842,0.0002159150325572,45.021014865000005,565.3650753665761,70.0,6.296299775600878,279.38183363806587,9.78809641249912,295.46622982616725,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 18 |
+
2025-08-28T21:39:40,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,556022.2742515886,120.04730003744444,0.0002159037606884,45.028198748076925,1290.5364283088911,70.0,6.715580096024043,297.9706517199418,10.439900320649972,315.12613213661746,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 19 |
+
2025-08-29T07:19:01,Polyglot,7018bd5c-f3f7-4217-82bd-f99c47549636,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,590782.9242741596,127.55163552605332,0.0002159027119525,45.01229569875,365.9845712119681,70.0,7.135408322947885,316.5971697044776,11.09255788556135,334.8251359129894,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-284.30.1.el9_2.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.0562,50.7301,1950,machine,N,1.0
|
| 20 |
+
2025-09-12T17:53:42,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,35433.114145307,7.532744804032392,0.0002125905381373,45.01391094,386.52387169434695,70.0,0.4279788307983101,18.68029699117011,0.6653028734834993,19.773578695451874,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 21 |
+
2025-09-13T03:32:56,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,70187.390422579,15.037296655087864,0.0002142449885164,45.02323976785716,1066.6399218630286,70.0,0.8477376852737601,37.30754771073653,1.317865030785381,39.47315042679581,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 22 |
+
2025-09-13T13:15:40,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,105151.424477559,22.54777818545953,0.0002144315048273,45.01897185115385,1240.8022542978522,70.0,1.2700290855215488,55.9438915220218,1.974367056085538,59.18828766362916,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 23 |
+
2025-09-13T22:54:33,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,139883.685357171,30.04677362584501,0.0002147982700707,45.012118977,380.3497236211304,70.0,1.6895121501726251,74.5572595043711,2.626502071260429,78.87327372580393,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 24 |
+
2025-09-14T08:33:07,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,174597.569834102,37.544244138329134,0.0002150330280885,45.0136559925,435.9359600499232,70.0,2.108786793417967,93.1671681370084,3.278301896792551,98.55425682721912,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 25 |
+
2025-09-14T18:11:49,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,209319.856302423,45.03533358925786,0.000215150795461,45.01959633,377.8464410106921,70.0,2.5281532920135144,111.76008151993838,3.930254725409163,118.21848953736148,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 26 |
+
2025-09-15T03:50:50,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,244060.793060815,52.5393223627778,0.0002152714563608,45.01198368000001,375.67737826143554,70.0,2.9477443572340967,130.38628400255533,4.5825548211472045,137.91658318093658,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 27 |
+
2025-09-15T13:30:04,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,278815.183456241,60.04037351907612,0.0002153411187109,45.0088320375,361.2342277014224,70.0,3.36749429197627,149.00436996506772,5.235101274066568,157.60696553110938,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 28 |
+
2025-09-15T23:08:49,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,313539.578846906,67.54012742818254,0.0002154118075828,45.03400875346154,1137.4047317550212,70.0,3.7868906546986447,167.61995127863173,5.8871006527920855,177.29394258612032,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 29 |
+
2025-09-16T08:47:42,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,348273.277568986,75.04127816726923,0.0002154666550677,45.0119925225,382.4869495391289,70.0,4.2063684603095535,186.238990845796,6.539227036642861,196.98458634274635,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 30 |
+
2025-09-16T18:26:34,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,383005.49151103495,82.54219112970335,0.0002155117693066,45.02270651192308,1233.4869479832737,70.0,4.625860606177265,204.85737001659828,7.191375308985193,216.6746059317584,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 31 |
+
2025-09-17T04:05:27,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,417738.210323712,90.03485143164706,0.000215529365537,45.01344195,387.6667804673638,70.0,5.045355418659748,223.45410162286103,7.843505110068333,236.34296215158696,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 32 |
+
2025-09-17T13:43:34,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,452424.92944206495,97.51795576735452,0.0002155450538227,45.03121686,1018.4531962215392,70.0,5.464296292931893,242.0271414410041,8.49479606933338,255.98623380326688,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 33 |
+
2025-09-17T23:22:14,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,487144.746918214,105.01332261072926,0.000215569034204,45.01448735625,378.41232254417815,70.0,5.883628784547616,260.6313703046525,9.146695649557808,275.6616947387564,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 34 |
+
2025-09-18T09:00:50,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,521860.817198245,112.50745463750664,0.0002155890055925,45.04347873,907.6842160170957,70.0,6.302923856185871,279.2324532157838,9.798537188451157,295.333914260419,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 35 |
+
2025-09-18T10:26:27,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,526998.110699902,113.61576632911851,0.0002155904623229,45.01736437500001,417.4374265137234,70.0,6.3651504787673066,281.982824699968,9.89527545551898,298.2432506342526,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 36 |
+
2025-09-18T18:40:58,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,556669.022508882,120.0213829581797,0.0002156063623178,45.010935,394.782799134864,70.0,6.723696172654983,297.8817287746374,10.45267443105286,315.05809937834283,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 37 |
+
2025-09-19T04:19:33,Polyglot,9322e9ce-6df0-4d9d-925f-b81d7e41dc33,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,591384.027183893,127.51335181287196,0.0002156185252754,45.01202428800001,384.39380573741846,70.0,7.142959703988181,316.4772134082361,11.10446742557008,334.72464053779225,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 38 |
+
2025-09-26T00:51:36,Polyglot,b2ab499e-f674-4146-973b-8293ebeecc78,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,35338.602089488995,7.482491370132026,0.0002117370503559,45.02278136045455,1067.7551732054185,70.0,0.4268859943501147,18.551227252358824,0.6635493602789603,19.64166260698786,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
| 39 |
+
2025-09-26T09:37:51,Polyglot,b2ab499e-f674-4146-973b-8293ebeecc78,5b0fa12a-3dd7-45bb-9766-cc326314d9f1,66914.01955960004,14.306468730226216,0.0002138037562888223,45.02641663125001,1867.574117815583,70.0,0.8082526912175959,35.490039767564674,1.2564226750311307,37.5547151338134,Germany,DEU,north rhine-westphalia,,,Linux-5.14.0-570.35.1.el9_6.x86_64-x86_64-with-glibc2.34,3.12.3,3.0.4,256,AMD EPYC 7713 64-Core Processor,4,4 x NVIDIA A100-SXM4-80GB,7.1178,50.7246,1950,machine,N,1.0
|
evals.yaml
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
evaluations:
|
| 2 |
+
ARC_Challenge_Acc: 0.2482876712328767
|
| 3 |
+
ARC_Challenge_Acc_norm: 0.300513698630137
|
| 4 |
+
HellaSwag_Acc: 0.3172259983007646
|
| 5 |
+
HellaSwag_Acc_norm: 0.3706457094307561
|
| 6 |
+
MMLU_Acc: 0.259306906810324
|
| 7 |
+
MMLU_Acc_norm: 0.2619602798102436
|
| 8 |
+
MILU_Acc: 0.2751668801833996
|
| 9 |
+
MILU_Acc_norm: 0.2855505360393769
|
| 10 |
+
CommonsenseQA_Acc: 0.4215424018212862
|
| 11 |
+
CommonsenseQA_Acc_norm: 0.3712293682413204
|
| 12 |
+
Global_PIQA_Acc: 0.58
|
| 13 |
+
Global_PIQA_Acc_norm: 0.65
|
| 14 |
+
step: '179590'
|
evals_all_steps.csv
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
step,arc_hi_acc,arc_hi_acc_norm,hellaswag_hi_acc,hellaswag_hi_acc_norm,mmlu_hi_acc,mmlu_hi_acc_norm,truthfulqa_hi_mc1,truthfulqa_hi_mc2,copa_hi_acc,copa_hi_acc_norm,iitp_mr_hi_acc,iitp_mr_hi_acc_norm,indicxnli_acc,indicxnli_acc_norm,milu_hi_acc,milu_hi_acc_norm,csqa_hi_acc,csqa_hi_acc_norm,global_piqa_acc,global_piqa_acc_norm
|
| 2 |
+
05000,0.2131849315068493,0.2525684931506849,0.2818606627017841,0.3106414613423959,0.2432258583259628,0.2584224491436842,0.240620957309185,0.4243197005669198,0.532293986636971,0.5278396436525612,0.3870967741935484,0.3870967741935484,0.3759036144578313,0.3823293172690763,0.253927584114355,0.2639066819499696,0.3874786568013659,0.346812749003984,0.55,0.57
|
| 3 |
+
10000,0.2260273972602739,0.2714041095890411,0.2896134239592183,0.3217926932880204,0.243145453083541,0.2581812334164187,0.2380336351875808,0.4097685409614982,0.5412026726057907,0.5590200445434298,0.3193548387096774,0.3193548387096774,0.3991967871485943,0.3895582329317269,0.2614793338277931,0.2676151304699615,0.3915196357427433,0.3476380193511668,0.52,0.59
|
| 4 |
+
15000,0.2303082191780822,0.2773972602739726,0.2935429056924384,0.328695836873407,0.2503015196590817,0.2576183967194661,0.222509702457956,0.4030135472163406,0.5367483296213809,0.5189309576837416,0.332258064516129,0.332258064516129,0.3843373493975903,0.4040160642570281,0.2444878969725575,0.2464432607376441,0.4028457598178713,0.3581673306772908,0.53,0.62
|
| 5 |
+
20000,0.2320205479452055,0.2731164383561644,0.2970475785896346,0.3382540356839422,0.2472461204470531,0.2651764895071158,0.2289780077619663,0.4250298003686678,0.5256124721603563,0.534521158129176,0.3161290322580645,0.3161290322580645,0.3947791164658634,0.3879518072289156,0.2595913963994336,0.2633672712561526,0.3999146272054638,0.357057484348321,0.53,0.63
|
| 6 |
+
25000,0.2328767123287671,0.2791095890410959,0.3033135089209856,0.338678844519966,0.2443515317198681,0.2546434027498593,0.2199223803363518,0.4061963597209867,0.534521158129176,0.5523385300668151,0.3096774193548387,0.3096774193548387,0.4212851405622489,0.4128514056224899,0.2696379205717753,0.276447980581215,0.404667046101309,0.3553215708594194,0.56,0.58
|
| 7 |
+
30000,0.2345890410958904,0.2756849315068493,0.3014018691588785,0.3445199660152931,0.2538393503256412,0.2614778483557128,0.2212160413971539,0.4090077039627854,0.5478841870824054,0.5389755011135857,0.3677419354838709,0.3677419354838709,0.4196787148594377,0.4032128514056224,0.2693682152248668,0.2752343065201267,0.407455890722823,0.361126920887877,0.56,0.61
|
| 8 |
+
35000,0.2422945205479452,0.2928082191780822,0.3039507221750212,0.3452633814783347,0.2523116507196269,0.2590656910830586,0.2212160413971539,0.4090758306363093,0.5434298440979956,0.5211581291759465,0.3354838709677419,0.3354838709677419,0.4349397590361445,0.4228915662650602,0.2605353651136133,0.2676151304699615,0.4077689243027888,0.3605008537279454,0.55,0.58
|
| 9 |
+
40000,0.2440068493150685,0.273972602739726,0.3020390824129141,0.3463254035683942,0.2569751547800916,0.2645332475677414,0.2134540750323415,0.3981797689376249,0.5367483296213809,0.5189309576837416,0.3161290322580645,0.3161290322580645,0.4184738955823293,0.4072289156626506,0.2657946193783291,0.2697727732452296,0.413915765509391,0.3647410358565737,0.54,0.63
|
| 10 |
+
45000,0.2303082191780822,0.2842465753424658,0.3036321155480034,0.3491928632115548,0.2585832596285278,0.2604325802042293,0.2263906856403622,0.3919191088196193,0.5300668151447662,0.5256124721603563,0.3903225806451613,0.3903225806451613,0.4265060240963855,0.410441767068273,0.2570966219405299,0.2635021239296069,0.4096186681844052,0.3626920887877063,0.57,0.61
|
| 11 |
+
50000,0.2337328767123287,0.285958904109589,0.306393372982158,0.3529099405267629,0.2509447615984562,0.2632467636889925,0.2263906856403622,0.3897103146983419,0.5478841870824054,0.5456570155902004,0.3,0.3,0.423293172690763,0.4024096385542168,0.2670757197761446,0.2717281370103162,0.4149402390438247,0.3669607285145134,0.57,0.63
|
| 12 |
+
55000,0.2397260273972602,0.2799657534246575,0.308411214953271,0.3524851316907391,0.2529548926590013,0.258502854386106,0.2302716688227684,0.4024781634587988,0.5590200445434298,0.5590200445434298,0.3677419354838709,0.3677419354838709,0.4385542168674698,0.4156626506024096,0.2628278605623356,0.2700424785921381,0.4075412635173591,0.3637450199203187,0.56,0.64
|
| 13 |
+
60000,0.238013698630137,0.2825342465753425,0.3074553950722175,0.3537595581988105,0.2570555600225134,0.2626035217496181,0.2160413971539456,0.3971905543851754,0.5501113585746102,0.5144766146993318,0.332258064516129,0.332258064516129,0.4325301204819277,0.4244979919678714,0.2548041264918077,0.265727193041602,0.4093910073989755,0.3644564598747866,0.54,0.63
|
| 14 |
+
65000,0.2431506849315068,0.2868150684931507,0.3117034834324554,0.3600254885301614,0.2552062394468119,0.2617994693254,0.2212160413971539,0.3919504225776677,0.5434298440979956,0.5412026726057907,0.3258064516129032,0.3258064516129032,0.4301204819277108,0.4244979919678714,0.2749646011732182,0.2841345829681073,0.4182413204325555,0.3660785429709732,0.54,0.64
|
| 15 |
+
70000,0.2422945205479452,0.2910958904109589,0.3096856414613424,0.3601316907391674,0.2571359652649352,0.2588244753557931,0.2212160413971539,0.3960414345598643,0.5545657015590201,0.5367483296213809,0.3193548387096774,0.3193548387096774,0.4333333333333333,0.4204819277108433,0.2677499831434158,0.2766502595913964,0.4126920887877063,0.3661070005691519,0.53,0.62
|
| 16 |
+
75000,0.2397260273972602,0.2936643835616438,0.312340696686491,0.360768903993203,0.2581812334164187,0.2585832596285278,0.2315653298835705,0.4058531120966335,0.5456570155902004,0.5278396436525612,0.3677419354838709,0.3677419354838709,0.4156626506024096,0.4040160642570281,0.2750994538466725,0.2836625986110174,0.4128059191804212,0.363118952760387,0.56,0.6
|
| 17 |
+
80000,0.2517123287671233,0.300513698630137,0.3092608326253186,0.3614061172472387,0.2599501487496984,0.2659001366889121,0.2393272962483829,0.4098130561192677,0.5590200445434298,0.5278396436525612,0.3838709677419354,0.3838709677419354,0.4253012048192771,0.4160642570281124,0.2696379205717753,0.2805609871215697,0.4168184405236198,0.3679852020489471,0.57,0.65
|
| 18 |
+
85000,0.2482876712328767,0.2928082191780822,0.3121282922684791,0.3632115548003398,0.2561711023558736,0.2609150116587601,0.2354463130659767,0.400331304008595,0.5590200445434298,0.532293986636971,0.364516129032258,0.364516129032258,0.4277108433734939,0.4112449799196787,0.2730092374081316,0.2833254669273818,0.4178144564598748,0.3682697780307342,0.54,0.61
|
| 19 |
+
90000,0.2508561643835616,0.2936643835616438,0.3127655055225148,0.3639549702633814,0.2470853099622095,0.2592265015679022,0.2276843467011643,0.3985713129896215,0.5590200445434298,0.534521158129176,0.2838709677419355,0.2838709677419355,0.4220883534136546,0.3995983935742971,0.2868316364371924,0.298428966354258,0.4152817302219693,0.3689243027888446,0.58,0.64
|
| 20 |
+
95000,0.2568493150684932,0.2928082191780822,0.3143585386576041,0.3671410365335599,0.2546434027498593,0.2626035217496181,0.2276843467011643,0.4079544284856502,0.5434298440979956,0.5389755011135857,0.2967741935483871,0.2967741935483871,0.4212851405622489,0.4128514056224899,0.2754365855303081,0.2821117928662935,0.4171030165054069,0.3688958451906659,0.55,0.63
|
| 21 |
+
100000,0.2534246575342466,0.288527397260274,0.3129779099405267,0.3683092608326253,0.2544825922650157,0.2626839269920399,0.2289780077619663,0.4012834314532906,0.5523385300668151,0.5389755011135857,0.3258064516129032,0.3258064516129032,0.4180722891566265,0.4112449799196787,0.2836625986110174,0.2932371384262693,0.419379624359704,0.3701479795105293,0.55,0.66
|
| 22 |
+
105000,0.2594178082191781,0.2902397260273973,0.3152081563296516,0.368840271877655,0.256412318083139,0.262201495537509,0.2315653298835705,0.4010640454510209,0.5367483296213809,0.532293986636971,0.3129032258064516,0.3129032258064516,0.4228915662650602,0.4028112449799196,0.2771222439484863,0.2850785516822871,0.4182697780307342,0.3677575412635173,0.58,0.66
|
| 23 |
+
110000,0.2491438356164383,0.2962328767123288,0.3174384027187765,0.366928632115548,0.2565731285679826,0.2604325802042293,0.2328589909443725,0.4124873621208749,0.5434298440979956,0.532293986636971,0.3451612903225806,0.3451612903225806,0.4152610441767068,0.4076305220883534,0.2829209089070191,0.2913492009979098,0.4215139442231075,0.3701195219123506,0.56,0.63
|
| 24 |
+
115000,0.25,0.2979452054794521,0.3176508071367884,0.3695836873406967,0.2558494813861864,0.2605933906890729,0.2263906856403622,0.3995155428413429,0.5545657015590201,0.5256124721603563,0.3258064516129032,0.3258064516129032,0.4200803212851405,0.4124497991967871,0.2707167419594093,0.2775942283055762,0.4183551508252703,0.3678998292544109,0.6,0.65
|
| 25 |
+
120000,0.2525684931506849,0.300513698630137,0.3137213254035684,0.3677782497875956,0.253919755568063,0.258904880598215,0.2276843467011643,0.4002037635409254,0.5567928730512249,0.5412026726057907,0.3096774193548387,0.3096774193548387,0.4188755020080321,0.410441767068273,0.2829883352437462,0.2885172948553705,0.4190096755833807,0.3685828116107,0.58,0.64
|
| 26 |
+
125000,0.2482876712328767,0.300513698630137,0.3166949872557349,0.3684154630416312,0.2609150116587601,0.2630859532041489,0.2289780077619663,0.4028802361780124,0.5501113585746102,0.5501113585746102,0.3,0.3,0.423293172690763,0.4116465863453815,0.2813026768255681,0.2897983952531859,0.421371656232214,0.3715993170176437,0.6,0.65
|
| 27 |
+
130000,0.2431506849315068,0.2945205479452055,0.3168011894647408,0.3704333050127442,0.2558494813861864,0.2609150116587601,0.2380336351875808,0.4034970782739727,0.5590200445434298,0.5412026726057907,0.2903225806451613,0.2903225806451613,0.4313253012048192,0.4208835341365461,0.2841345829681073,0.2922257433753624,0.4212862834376779,0.3703471826977803,0.58,0.66
|
| 28 |
+
135000,0.2440068493150685,0.2979452054794521,0.316482582837723,0.3686278674596431,0.2575379914770443,0.2617994693254,0.2367399741267787,0.4061901392457935,0.5545657015590201,0.5300668151447662,0.3064516129032258,0.3064516129032258,0.4309236947791164,0.4184738955823293,0.2732789427550401,0.2831231879172005,0.4206886738759248,0.3704040978941377,0.57,0.66
|
| 29 |
+
140000,0.2491438356164383,0.2919520547945205,0.316588785046729,0.3701146983857264,0.2564927233255608,0.2585832596285278,0.2367399741267787,0.4030490917934246,0.5456570155902004,0.532293986636971,0.3258064516129032,0.3258064516129032,0.4269076305220883,0.4068273092369477,0.2713910053266806,0.2798867237542984,0.4241035856573705,0.3713431986340353,0.6,0.65
|
| 30 |
+
145000,0.2534246575342466,0.3022260273972603,0.3180756159728122,0.3693712829226848,0.2540001608104848,0.2605933906890729,0.2367399741267787,0.409437747794674,0.5545657015590201,0.532293986636971,0.3096774193548387,0.3096774193548387,0.4204819277108433,0.4088353413654618,0.2782010653361202,0.2860225203964668,0.4207455890722823,0.370176437108708,0.6,0.7
|
| 31 |
+
150000,0.25,0.300513698630137,0.3158453695836873,0.3685216652506372,0.2580204229315751,0.2609954169011819,0.2341526520051746,0.4073158343694472,0.5545657015590201,0.5278396436525612,0.2967741935483871,0.2967741935483871,0.4192771084337349,0.4100401606425702,0.2796170184073899,0.2879778841615535,0.4210017074558907,0.37057484348321,0.59,0.66
|
| 32 |
+
155000,0.2534246575342466,0.2970890410958904,0.3160577740016992,0.3701146983857264,0.2556886709013428,0.258502854386106,0.2354463130659767,0.4059185722804225,0.5523385300668151,0.5367483296213809,0.3387096774193548,0.3387096774193548,0.4265060240963855,0.4120481927710843,0.2767851122648506,0.2857528150495583,0.4208025042686397,0.3697495731360273,0.59,0.67
|
| 33 |
+
160000,0.2508561643835616,0.300513698630137,0.3158453695836873,0.3701146983857264,0.2572967757497789,0.2587440701133714,0.2354463130659767,0.4034919247403767,0.5567928730512249,0.532293986636971,0.3064516129032258,0.3064516129032258,0.4281124497991967,0.4060240963855421,0.2779313599892118,0.2860225203964668,0.4216277746158224,0.3704040978941377,0.6,0.65
|
| 34 |
+
165000,0.2534246575342466,0.2979452054794521,0.3176508071367884,0.3719201359388275,0.2578596124467315,0.2611562273860255,0.240620957309185,0.4072788192460527,0.5545657015590201,0.5389755011135857,0.267741935483871,0.267741935483871,0.4301204819277108,0.4108433734939759,0.275908569887398,0.2857528150495583,0.4194365395560614,0.3689527603870233,0.59,0.65
|
| 35 |
+
170000,0.247431506849315,0.3030821917808219,0.3172259983007646,0.3710705182667799,0.2566535338104044,0.2592265015679022,0.2380336351875808,0.4024878931449278,0.5545657015590201,0.5278396436525612,0.2806451612903226,0.2806451612903226,0.4261044176706827,0.4120481927710843,0.2785381970197559,0.2863596520801025,0.4219977233921457,0.3725099601593625,0.6,0.64
|
| 36 |
+
175000,0.2517123287671233,0.2988013698630137,0.3162701784197111,0.3703271028037383,0.2562515075982954,0.2624427112647745,0.2367399741267787,0.4031180556670132,0.5545657015590201,0.532293986636971,0.3032258064516129,0.3032258064516129,0.4144578313253012,0.3987951807228915,0.2763131279077607,0.286089946733194,0.4210870802504268,0.3706886738759248,0.59,0.66
|
| 37 |
+
179590,0.2482876712328767,0.300513698630137,0.3172259983007646,0.3706457094307561,0.259306906810324,0.2619602798102436,0.2393272962483829,0.407889506940758,0.5545657015590201,0.5389755011135857,0.3451612903225806,0.3451612903225806,0.423293172690763,0.4080321285140562,0.2751668801833996,0.2855505360393769,0.4215424018212862,0.3712293682413204,0.58,0.65
|
evals_for_reference.csv
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Model,arc_hi_acc,arc_hi_acc_norm,hellaswag_hi_acc,hellaswag_hi_acc_norm,mmlu_hi_acc,mmlu_hi_acc_norm,truthfulqa_hi_mc1,truthfulqa_hi_mc2,copa_hi_acc,copa_hi_acc_norm,iitp_mr_hi_acc,iitp_mr_hi_acc_norm,indicxnli_acc,indicxnli_acc_norm,milu_hi_acc,milu_hi_acc_norm,csqa_hi_acc,csqa_hi_acc_norm,global_piqa_acc,global_piqa_acc_norm
|
| 2 |
+
Llama-3.2-1B-Instruct,0.2260273972602739,0.2611301369863014,0.2911002548853016,0.3273152081563296,0.2823832113853823,0.2944439977486532,0.2755498059508409,0.4671780924298416,0.512249443207127,0.4988864142538975,0.535483870967742,0.535483870967742,0.3983935742971887,0.3907630522088353,0.2848088463353786,0.298428966354258,0.3638303927148548,0.3287990893568582,,
|
| 3 |
+
Qwen3-0.6B,0.2063356164383561,0.2602739726027397,0.2689039932030586,0.2890824129141886,0.2905041408699847,0.2965345340516201,0.2561448900388098,0.4567232616277077,0.5367483296213809,0.5033407572383074,0.4419354838709677,0.4419354838709677,0.3702811244979919,0.3642570281124498,0.2751668801833996,0.2886521475288247,0.315509391007399,0.2995731360273193,0.46,0.54
|
| 4 |
+
HindiLLM-medium,0.1900684931506849,0.2226027397260274,0.2672047578589634,0.2804800339847069,0.2378387070837018,0.2628447374768834,0.2509702457956015,0.4469643888228802,0.534521158129176,0.5367483296213809,0.3935483870967742,0.3935483870967742,0.3433734939759036,0.3389558232931727,0.2548715528285348,0.2704470366125008,0.3596186681844052,0.322965281730222,0.48,0.6
|
| 5 |
+
Qwen2.5-3B,0.2311643835616438,0.2662671232876712,0.2907816482582838,0.3276338147833475,0.3610195384739085,0.3572404920800836,0.2652005174644243,0.4380496715887901,0.5233853006681515,0.4988864142538975,0.5516129032258065,0.5516129032258065,0.4096385542168674,0.3995983935742971,0.3645742026835682,0.3658553030813836,0.3543540125213432,0.3223676721684689,,
|
| 6 |
+
gemma-3-1b-it,0.2106164383561644,0.2465753424657534,0.2892948173322005,0.3177570093457944,0.2830264533247568,0.2922730562032644,0.2897800776196636,0.4721026764339953,0.532293986636971,0.5278396436525612,0.5064516129032258,0.5064516129032258,0.3967871485943775,0.395582329317269,0.2934394174364507,0.3004517564560717,0.3598178713716562,0.3206886738759248,,
|
| 7 |
+
Qwen3-1.7B,0.2559931506849315,0.285958904109589,0.284197111299915,0.3181818181818182,0.3558736029589129,0.360778322746643,0.2690815006468305,0.4728831115100759,0.5679287305122495,0.5389755011135857,0.5161290322580645,0.5161290322580645,0.4012048192771084,0.3919678714859437,0.3679455195199245,0.369833456948284,0.3369664200341491,0.3178998292544109,,
|
| 8 |
+
Qwen2.5-1.5B,0.2226027397260274,0.2363013698630137,0.2803738317757009,0.3026762956669498,0.3132588244753558,0.3159926027176972,0.276843467011643,0.4529494179609675,0.5256124721603563,0.5077951002227171,0.3612903225806451,0.3612903225806451,0.3799196787148594,0.3791164658634538,0.3039579259658823,0.310296001618232,0.3336368810472396,0.3065167899829254,,
|
| 9 |
+
Llama-3.2-3B-Instruct,0.2731164383561644,0.3073630136986301,0.330607476635514,0.3948598130841121,0.3315912197475275,0.332636487899011,0.2664941785252264,0.446896635332512,0.5278396436525612,0.512249443207127,0.5806451612903226,0.5806451612903226,0.4124497991967871,0.4128514056224899,0.3605960488166678,0.3645742026835682,0.3844052361980649,0.3511098463289698,,
|
| 10 |
+
Qwen-2.5-1.5B-Instruct,0.2268835616438356,0.2422945205479452,0.280267629566695,0.3039507221750212,0.311087882929967,0.3208973225054273,0.2742561448900388,0.4536794735165977,0.5501113585746102,0.5278396436525612,0.3967741935483871,0.3967741935483871,0.385140562248996,0.3811244979919678,0.3040253523026094,0.3086103432000539,0.3342629482071713,0.3067444507683551,,
|
| 11 |
+
Airavata,0.2243150684931507,0.2645547945205479,0.2948173322005097,0.3322005097706032,0.272171745597813,0.2854386105974109,0.2626131953428202,0.4500189832322431,0.5456570155902004,0.5256124721603563,0.3516129032258064,0.3516129032258064,0.4281124497991967,0.3971887550200803,0.2811003978153867,0.2912817746611826,0.3560045532157086,0.3366818440523619,0.58,0.62
|
| 12 |
+
Qwen2.5-3B-Instruct,0.247431506849315,0.2799657534246575,0.2933305012744265,0.3292268479184367,0.3762965345340516,0.3688992522312455,0.2613195342820181,0.4348808885322328,0.5300668151447662,0.5011135857461024,0.5612903225806452,0.5612903225806452,0.3887550200803213,0.3847389558232931,0.3796777021104443,0.3800822601308071,0.357057484348321,0.3261525327262379,,
|
| 13 |
+
Llama-3.2-3B,0.2636986301369863,0.3056506849315068,0.3280586236193712,0.4034621920135939,0.3190480019297258,0.324354747929565,0.2677878395860285,0.4396524010886695,0.532293986636971,0.5011135857461024,0.5483870967741935,0.5483870967741935,0.3859437751004016,0.3771084337349397,0.3562807632661317,0.3617422965410289,0.3955036994877632,0.3668184405236198,0.54,0.64
|
| 14 |
+
muril-large-cased,0.2208904109589041,0.2525684931506849,0.2512744265080713,0.2582837723024639,0.2241698158719948,0.2434670740532282,0.2276843467011643,0.5001468455946757,0.4766146993318486,0.4810690423162583,0.3838709677419354,0.3838709677419354,0.3526104417670683,0.3317269076305221,0.2301260872496797,0.241588564493291,0.2903244166192373,0.2646272054638588,,
|
| 15 |
+
HindiLLM-small,0.1960616438356164,0.2320205479452055,0.2507434154630416,0.2584961767204758,0.2285921042051941,0.2593873120527458,0.2871927554980595,0.4846568348323389,0.512249443207127,0.5300668151447662,0.3161290322580645,0.3161290322580645,0.3253012048192771,0.3385542168674699,0.2519722203492684,0.2643786663070595,0.2999715424018213,0.2796528173022197,,
|
| 16 |
+
Qwen3-0.6B-Base,0.2277397260273972,0.2748287671232877,0.2751699235344095,0.2982158028887001,0.3098818042936399,0.3105250462330144,0.2794307891332471,0.4709799653282344,0.5389755011135857,0.534521158129176,0.3612903225806451,0.3612903225806451,0.3859437751004016,0.3582329317269076,0.3010585934866159,0.3093520329040523,0.3347467273762095,0.3096471257825839,0.49,0.53
|
| 17 |
+
Qwen3-1.7B-Base,0.2722602739726027,0.3013698630136986,0.2980033984706882,0.3411214953271028,0.3853019216852938,0.3827289539277961,0.278137128072445,0.4516788347373374,0.5523385300668151,0.5233853006681515,0.603225806451613,0.603225806451613,0.4228915662650602,0.4273092369477911,0.3922190007416897,0.3897916526195132,0.3634604439385316,0.3343767785998862,,
|
| 18 |
+
muril-base-cased,0.2208904109589041,0.2688356164383562,0.2527612574341546,0.241928632115548,0.2313258824475355,0.2408941062957305,0.2328589909443725,0.4986325168680121,0.5033407572383074,0.4766146993318486,0.332258064516129,0.332258064516129,0.3413654618473896,0.3313253012048193,0.2289124131885914,0.246375834400917,0.3449630051223676,0.3018497438816164,,
|
| 19 |
+
Qwen-2.5-0.5B-Instruct,0.2020547945205479,0.2337328767123287,0.2693288020390824,0.2875955819881053,0.2802122698399936,0.2872879311731124,0.2923673997412678,0.4801813355602639,0.5545657015590201,0.5256124721603563,0.3580645161290323,0.3580645161290323,0.3429718875502008,0.3365461847389558,0.2729418110714044,0.2831231879172005,0.3253272623790552,0.3011667615253273,,
|
| 20 |
+
gemma-3-1b-pt,0.2183219178082192,0.2491438356164383,0.2965165675446049,0.3259345794392523,0.2528744874165796,0.2633271689314143,0.2858990944372574,0.4718021928835578,0.4988864142538975,0.4832962138084632,0.3935483870967742,0.3935483870967742,0.3706827309236948,0.3618473895582329,0.2518373676758141,0.2625581552154271,0.3932555492316448,0.3592771770062606,0.52,0.64
|
| 21 |
+
Qwen-2.5-0.5B,0.2011986301369863,0.2371575342465753,0.2684791843670348,0.286321155480034,0.2720913403553912,0.2778805178097612,0.2871927554980595,0.482118582921091,0.5835189309576837,0.5367483296213809,0.4064516129032258,0.4064516129032258,0.329718875502008,0.3321285140562249,0.272132695030679,0.2848088463353786,0.3226237905520774,0.3001138303927148,0.5,0.57
|
| 22 |
+
Llama-3.2-1B,0.2337328767123287,0.2654109589041096,0.2920560747663551,0.3299702633814783,0.2615582535981346,0.2796494331430409,0.2690815006468305,0.4549373920832709,0.5367483296213809,0.5144766146993318,0.3806451612903225,0.3806451612903225,0.3939759036144578,0.3726907630522088,0.2731440900815858,0.2910794956510013,0.3702902675014228,0.3365964712578258,0.48,0.55
|
| 23 |
+
OpenHathi-7B-Hi-v0.1-Base,0.2328767123287671,0.273972602739726,0.3093670348343245,0.3557774001699235,0.2746643081128889,0.2878507678700651,0.2496765847347994,0.4451412752203544,0.5590200445434298,0.5300668151447662,0.3838709677419354,0.3838709677419354,0.4301204819277108,0.4096385542168674,0.2952599285280831,0.307598948149147,0.3616676152532726,0.3335799658508822,,
|
| 24 |
+
gemma-2-2b-it,0.2217465753424657,0.261986301369863,0.2918436703483432,0.3242353440951572,0.3085149151724692,0.3148669293237919,0.2949547218628719,0.4972516625270038,0.512249443207127,0.5011135857461024,0.6161290322580645,0.6161290322580645,0.4040160642570281,0.4128514056224899,0.3588429640617625,0.3663272874384735,0.3630620375640296,0.3214570290267501,,
|
| 25 |
+
gemma-2-2b,0.2046232876712329,0.2491438356164383,0.2615760407816482,0.2854715378079864,0.2445927474471335,0.2597089330224331,0.2949547218628719,0.5016770009850119,0.5167037861915368,0.512249443207127,0.3967741935483871,0.3967741935483871,0.4220883534136546,0.4080321285140562,0.3298496392690985,0.3380756523498078,0.3724530449630051,0.3406659077973819,,
|
generation_config.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"bos_token_id": 1,
|
| 3 |
+
"eos_token_id": 2,
|
| 4 |
+
"pad_token_id": 49109,
|
| 5 |
+
"transformers_version": "4.53.2",
|
| 6 |
+
"do_sample": true,
|
| 7 |
+
"max_new_tokens": 1024,
|
| 8 |
+
"renormalize_logits": true,
|
| 9 |
+
"repetition_penalty": 1.2,
|
| 10 |
+
"temperature": 0.1,
|
| 11 |
+
"top_k": 50,
|
| 12 |
+
"top_p": 1.0,
|
| 13 |
+
"use_cache": false
|
| 14 |
+
}
|
learning_curves.png
ADDED
|
Git LFS Details
|
logo_lilmoo.png
ADDED
|
Git LFS Details
|
model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:48f26ffc81a358081d287bbc2275f164f6246dfbd47a061537560e1df0e7f95b
|
| 3 |
+
size 1340284304
|
performance_vs_compute.png
ADDED
|
Git LFS Details
|
special_tokens_map.json
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"additional_special_tokens": [
|
| 3 |
+
"<tools>",
|
| 4 |
+
"</tools>",
|
| 5 |
+
"<tool_call>",
|
| 6 |
+
"</tool_call>",
|
| 7 |
+
"<tool_response>",
|
| 8 |
+
"</tool_response>",
|
| 9 |
+
"<think>",
|
| 10 |
+
"</think>",
|
| 11 |
+
"<answer>",
|
| 12 |
+
"</answer>",
|
| 13 |
+
"<context>",
|
| 14 |
+
"</context>",
|
| 15 |
+
"<|fim_prefix|>",
|
| 16 |
+
"<|fim_suffix|>",
|
| 17 |
+
"<|fim_middle|>",
|
| 18 |
+
"<|repo_name|>",
|
| 19 |
+
"<|image|>",
|
| 20 |
+
"<|image_pad|>",
|
| 21 |
+
"<|image_placeholder|>",
|
| 22 |
+
" ",
|
| 23 |
+
" ",
|
| 24 |
+
" ",
|
| 25 |
+
" ",
|
| 26 |
+
" ",
|
| 27 |
+
" ",
|
| 28 |
+
" ",
|
| 29 |
+
" ",
|
| 30 |
+
" ",
|
| 31 |
+
" ",
|
| 32 |
+
" ",
|
| 33 |
+
" ",
|
| 34 |
+
" ",
|
| 35 |
+
" ",
|
| 36 |
+
" ",
|
| 37 |
+
" ",
|
| 38 |
+
" ",
|
| 39 |
+
" ",
|
| 40 |
+
" ",
|
| 41 |
+
" ",
|
| 42 |
+
" ",
|
| 43 |
+
" ",
|
| 44 |
+
" "
|
| 45 |
+
],
|
| 46 |
+
"bos_token": {
|
| 47 |
+
"content": "<|im_start|>",
|
| 48 |
+
"lstrip": false,
|
| 49 |
+
"normalized": false,
|
| 50 |
+
"rstrip": false,
|
| 51 |
+
"single_word": false
|
| 52 |
+
},
|
| 53 |
+
"eos_token": {
|
| 54 |
+
"content": "<|im_end|>",
|
| 55 |
+
"lstrip": false,
|
| 56 |
+
"normalized": false,
|
| 57 |
+
"rstrip": false,
|
| 58 |
+
"single_word": false
|
| 59 |
+
},
|
| 60 |
+
"pad_token": {
|
| 61 |
+
"content": "<|pad|>",
|
| 62 |
+
"lstrip": false,
|
| 63 |
+
"normalized": false,
|
| 64 |
+
"rstrip": false,
|
| 65 |
+
"single_word": false
|
| 66 |
+
},
|
| 67 |
+
"unk_token": {
|
| 68 |
+
"content": "<|unk|>",
|
| 69 |
+
"lstrip": false,
|
| 70 |
+
"normalized": false,
|
| 71 |
+
"rstrip": false,
|
| 72 |
+
"single_word": false
|
| 73 |
+
}
|
| 74 |
+
}
|
tokenizer.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
tokenizer_config.json
ADDED
|
@@ -0,0 +1,436 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"add_bos_token": false,
|
| 3 |
+
"add_eos_token": false,
|
| 4 |
+
"add_prefix_space": null,
|
| 5 |
+
"added_tokens_decoder": {
|
| 6 |
+
"0": {
|
| 7 |
+
"content": "<|unk|>",
|
| 8 |
+
"lstrip": false,
|
| 9 |
+
"normalized": false,
|
| 10 |
+
"rstrip": false,
|
| 11 |
+
"single_word": false,
|
| 12 |
+
"special": true
|
| 13 |
+
},
|
| 14 |
+
"1": {
|
| 15 |
+
"content": "<|im_start|>",
|
| 16 |
+
"lstrip": false,
|
| 17 |
+
"normalized": false,
|
| 18 |
+
"rstrip": false,
|
| 19 |
+
"single_word": false,
|
| 20 |
+
"special": true
|
| 21 |
+
},
|
| 22 |
+
"2": {
|
| 23 |
+
"content": "<|im_end|>",
|
| 24 |
+
"lstrip": false,
|
| 25 |
+
"normalized": false,
|
| 26 |
+
"rstrip": false,
|
| 27 |
+
"single_word": false,
|
| 28 |
+
"special": true
|
| 29 |
+
},
|
| 30 |
+
"49109": {
|
| 31 |
+
"content": "<|pad|>",
|
| 32 |
+
"lstrip": false,
|
| 33 |
+
"normalized": false,
|
| 34 |
+
"rstrip": false,
|
| 35 |
+
"single_word": false,
|
| 36 |
+
"special": true
|
| 37 |
+
},
|
| 38 |
+
"49110": {
|
| 39 |
+
"content": "<tools>",
|
| 40 |
+
"lstrip": false,
|
| 41 |
+
"normalized": false,
|
| 42 |
+
"rstrip": false,
|
| 43 |
+
"single_word": false,
|
| 44 |
+
"special": true
|
| 45 |
+
},
|
| 46 |
+
"49111": {
|
| 47 |
+
"content": "</tools>",
|
| 48 |
+
"lstrip": false,
|
| 49 |
+
"normalized": false,
|
| 50 |
+
"rstrip": false,
|
| 51 |
+
"single_word": false,
|
| 52 |
+
"special": true
|
| 53 |
+
},
|
| 54 |
+
"49112": {
|
| 55 |
+
"content": "<tool_call>",
|
| 56 |
+
"lstrip": false,
|
| 57 |
+
"normalized": false,
|
| 58 |
+
"rstrip": false,
|
| 59 |
+
"single_word": false,
|
| 60 |
+
"special": true
|
| 61 |
+
},
|
| 62 |
+
"49113": {
|
| 63 |
+
"content": "</tool_call>",
|
| 64 |
+
"lstrip": false,
|
| 65 |
+
"normalized": false,
|
| 66 |
+
"rstrip": false,
|
| 67 |
+
"single_word": false,
|
| 68 |
+
"special": true
|
| 69 |
+
},
|
| 70 |
+
"49114": {
|
| 71 |
+
"content": "<tool_response>",
|
| 72 |
+
"lstrip": false,
|
| 73 |
+
"normalized": false,
|
| 74 |
+
"rstrip": false,
|
| 75 |
+
"single_word": false,
|
| 76 |
+
"special": true
|
| 77 |
+
},
|
| 78 |
+
"49115": {
|
| 79 |
+
"content": "</tool_response>",
|
| 80 |
+
"lstrip": false,
|
| 81 |
+
"normalized": false,
|
| 82 |
+
"rstrip": false,
|
| 83 |
+
"single_word": false,
|
| 84 |
+
"special": true
|
| 85 |
+
},
|
| 86 |
+
"49116": {
|
| 87 |
+
"content": "<think>",
|
| 88 |
+
"lstrip": false,
|
| 89 |
+
"normalized": false,
|
| 90 |
+
"rstrip": false,
|
| 91 |
+
"single_word": false,
|
| 92 |
+
"special": true
|
| 93 |
+
},
|
| 94 |
+
"49117": {
|
| 95 |
+
"content": "</think>",
|
| 96 |
+
"lstrip": false,
|
| 97 |
+
"normalized": false,
|
| 98 |
+
"rstrip": false,
|
| 99 |
+
"single_word": false,
|
| 100 |
+
"special": true
|
| 101 |
+
},
|
| 102 |
+
"49118": {
|
| 103 |
+
"content": "<answer>",
|
| 104 |
+
"lstrip": false,
|
| 105 |
+
"normalized": false,
|
| 106 |
+
"rstrip": false,
|
| 107 |
+
"single_word": false,
|
| 108 |
+
"special": true
|
| 109 |
+
},
|
| 110 |
+
"49119": {
|
| 111 |
+
"content": "</answer>",
|
| 112 |
+
"lstrip": false,
|
| 113 |
+
"normalized": false,
|
| 114 |
+
"rstrip": false,
|
| 115 |
+
"single_word": false,
|
| 116 |
+
"special": true
|
| 117 |
+
},
|
| 118 |
+
"49120": {
|
| 119 |
+
"content": "<context>",
|
| 120 |
+
"lstrip": false,
|
| 121 |
+
"normalized": false,
|
| 122 |
+
"rstrip": false,
|
| 123 |
+
"single_word": false,
|
| 124 |
+
"special": true
|
| 125 |
+
},
|
| 126 |
+
"49121": {
|
| 127 |
+
"content": "</context>",
|
| 128 |
+
"lstrip": false,
|
| 129 |
+
"normalized": false,
|
| 130 |
+
"rstrip": false,
|
| 131 |
+
"single_word": false,
|
| 132 |
+
"special": true
|
| 133 |
+
},
|
| 134 |
+
"49122": {
|
| 135 |
+
"content": "<|fim_prefix|>",
|
| 136 |
+
"lstrip": false,
|
| 137 |
+
"normalized": false,
|
| 138 |
+
"rstrip": false,
|
| 139 |
+
"single_word": false,
|
| 140 |
+
"special": true
|
| 141 |
+
},
|
| 142 |
+
"49123": {
|
| 143 |
+
"content": "<|fim_suffix|>",
|
| 144 |
+
"lstrip": false,
|
| 145 |
+
"normalized": false,
|
| 146 |
+
"rstrip": false,
|
| 147 |
+
"single_word": false,
|
| 148 |
+
"special": true
|
| 149 |
+
},
|
| 150 |
+
"49124": {
|
| 151 |
+
"content": "<|fim_middle|>",
|
| 152 |
+
"lstrip": false,
|
| 153 |
+
"normalized": false,
|
| 154 |
+
"rstrip": false,
|
| 155 |
+
"single_word": false,
|
| 156 |
+
"special": true
|
| 157 |
+
},
|
| 158 |
+
"49125": {
|
| 159 |
+
"content": "<|repo_name|>",
|
| 160 |
+
"lstrip": false,
|
| 161 |
+
"normalized": false,
|
| 162 |
+
"rstrip": false,
|
| 163 |
+
"single_word": false,
|
| 164 |
+
"special": true
|
| 165 |
+
},
|
| 166 |
+
"49126": {
|
| 167 |
+
"content": "<|image|>",
|
| 168 |
+
"lstrip": false,
|
| 169 |
+
"normalized": false,
|
| 170 |
+
"rstrip": false,
|
| 171 |
+
"single_word": false,
|
| 172 |
+
"special": true
|
| 173 |
+
},
|
| 174 |
+
"49127": {
|
| 175 |
+
"content": "<|image_pad|>",
|
| 176 |
+
"lstrip": false,
|
| 177 |
+
"normalized": false,
|
| 178 |
+
"rstrip": false,
|
| 179 |
+
"single_word": false,
|
| 180 |
+
"special": true
|
| 181 |
+
},
|
| 182 |
+
"49128": {
|
| 183 |
+
"content": "<|image_placeholder|>",
|
| 184 |
+
"lstrip": false,
|
| 185 |
+
"normalized": false,
|
| 186 |
+
"rstrip": false,
|
| 187 |
+
"single_word": false,
|
| 188 |
+
"special": true
|
| 189 |
+
},
|
| 190 |
+
"49129": {
|
| 191 |
+
"content": " ",
|
| 192 |
+
"lstrip": false,
|
| 193 |
+
"normalized": false,
|
| 194 |
+
"rstrip": false,
|
| 195 |
+
"single_word": false,
|
| 196 |
+
"special": true
|
| 197 |
+
},
|
| 198 |
+
"49130": {
|
| 199 |
+
"content": " ",
|
| 200 |
+
"lstrip": false,
|
| 201 |
+
"normalized": false,
|
| 202 |
+
"rstrip": false,
|
| 203 |
+
"single_word": false,
|
| 204 |
+
"special": true
|
| 205 |
+
},
|
| 206 |
+
"49131": {
|
| 207 |
+
"content": " ",
|
| 208 |
+
"lstrip": false,
|
| 209 |
+
"normalized": false,
|
| 210 |
+
"rstrip": false,
|
| 211 |
+
"single_word": false,
|
| 212 |
+
"special": true
|
| 213 |
+
},
|
| 214 |
+
"49132": {
|
| 215 |
+
"content": " ",
|
| 216 |
+
"lstrip": false,
|
| 217 |
+
"normalized": false,
|
| 218 |
+
"rstrip": false,
|
| 219 |
+
"single_word": false,
|
| 220 |
+
"special": true
|
| 221 |
+
},
|
| 222 |
+
"49133": {
|
| 223 |
+
"content": " ",
|
| 224 |
+
"lstrip": false,
|
| 225 |
+
"normalized": false,
|
| 226 |
+
"rstrip": false,
|
| 227 |
+
"single_word": false,
|
| 228 |
+
"special": true
|
| 229 |
+
},
|
| 230 |
+
"49134": {
|
| 231 |
+
"content": " ",
|
| 232 |
+
"lstrip": false,
|
| 233 |
+
"normalized": false,
|
| 234 |
+
"rstrip": false,
|
| 235 |
+
"single_word": false,
|
| 236 |
+
"special": true
|
| 237 |
+
},
|
| 238 |
+
"49135": {
|
| 239 |
+
"content": " ",
|
| 240 |
+
"lstrip": false,
|
| 241 |
+
"normalized": false,
|
| 242 |
+
"rstrip": false,
|
| 243 |
+
"single_word": false,
|
| 244 |
+
"special": true
|
| 245 |
+
},
|
| 246 |
+
"49136": {
|
| 247 |
+
"content": " ",
|
| 248 |
+
"lstrip": false,
|
| 249 |
+
"normalized": false,
|
| 250 |
+
"rstrip": false,
|
| 251 |
+
"single_word": false,
|
| 252 |
+
"special": true
|
| 253 |
+
},
|
| 254 |
+
"49137": {
|
| 255 |
+
"content": " ",
|
| 256 |
+
"lstrip": false,
|
| 257 |
+
"normalized": false,
|
| 258 |
+
"rstrip": false,
|
| 259 |
+
"single_word": false,
|
| 260 |
+
"special": true
|
| 261 |
+
},
|
| 262 |
+
"49138": {
|
| 263 |
+
"content": " ",
|
| 264 |
+
"lstrip": false,
|
| 265 |
+
"normalized": false,
|
| 266 |
+
"rstrip": false,
|
| 267 |
+
"single_word": false,
|
| 268 |
+
"special": true
|
| 269 |
+
},
|
| 270 |
+
"49139": {
|
| 271 |
+
"content": " ",
|
| 272 |
+
"lstrip": false,
|
| 273 |
+
"normalized": false,
|
| 274 |
+
"rstrip": false,
|
| 275 |
+
"single_word": false,
|
| 276 |
+
"special": true
|
| 277 |
+
},
|
| 278 |
+
"49140": {
|
| 279 |
+
"content": " ",
|
| 280 |
+
"lstrip": false,
|
| 281 |
+
"normalized": false,
|
| 282 |
+
"rstrip": false,
|
| 283 |
+
"single_word": false,
|
| 284 |
+
"special": true
|
| 285 |
+
},
|
| 286 |
+
"49141": {
|
| 287 |
+
"content": " ",
|
| 288 |
+
"lstrip": false,
|
| 289 |
+
"normalized": false,
|
| 290 |
+
"rstrip": false,
|
| 291 |
+
"single_word": false,
|
| 292 |
+
"special": true
|
| 293 |
+
},
|
| 294 |
+
"49142": {
|
| 295 |
+
"content": " ",
|
| 296 |
+
"lstrip": false,
|
| 297 |
+
"normalized": false,
|
| 298 |
+
"rstrip": false,
|
| 299 |
+
"single_word": false,
|
| 300 |
+
"special": true
|
| 301 |
+
},
|
| 302 |
+
"49143": {
|
| 303 |
+
"content": " ",
|
| 304 |
+
"lstrip": false,
|
| 305 |
+
"normalized": false,
|
| 306 |
+
"rstrip": false,
|
| 307 |
+
"single_word": false,
|
| 308 |
+
"special": true
|
| 309 |
+
},
|
| 310 |
+
"49144": {
|
| 311 |
+
"content": " ",
|
| 312 |
+
"lstrip": false,
|
| 313 |
+
"normalized": false,
|
| 314 |
+
"rstrip": false,
|
| 315 |
+
"single_word": false,
|
| 316 |
+
"special": true
|
| 317 |
+
},
|
| 318 |
+
"49145": {
|
| 319 |
+
"content": " ",
|
| 320 |
+
"lstrip": false,
|
| 321 |
+
"normalized": false,
|
| 322 |
+
"rstrip": false,
|
| 323 |
+
"single_word": false,
|
| 324 |
+
"special": true
|
| 325 |
+
},
|
| 326 |
+
"49146": {
|
| 327 |
+
"content": " ",
|
| 328 |
+
"lstrip": false,
|
| 329 |
+
"normalized": false,
|
| 330 |
+
"rstrip": false,
|
| 331 |
+
"single_word": false,
|
| 332 |
+
"special": true
|
| 333 |
+
},
|
| 334 |
+
"49147": {
|
| 335 |
+
"content": " ",
|
| 336 |
+
"lstrip": false,
|
| 337 |
+
"normalized": false,
|
| 338 |
+
"rstrip": false,
|
| 339 |
+
"single_word": false,
|
| 340 |
+
"special": true
|
| 341 |
+
},
|
| 342 |
+
"49148": {
|
| 343 |
+
"content": " ",
|
| 344 |
+
"lstrip": false,
|
| 345 |
+
"normalized": false,
|
| 346 |
+
"rstrip": false,
|
| 347 |
+
"single_word": false,
|
| 348 |
+
"special": true
|
| 349 |
+
},
|
| 350 |
+
"49149": {
|
| 351 |
+
"content": " ",
|
| 352 |
+
"lstrip": false,
|
| 353 |
+
"normalized": false,
|
| 354 |
+
"rstrip": false,
|
| 355 |
+
"single_word": false,
|
| 356 |
+
"special": true
|
| 357 |
+
},
|
| 358 |
+
"49150": {
|
| 359 |
+
"content": " ",
|
| 360 |
+
"lstrip": false,
|
| 361 |
+
"normalized": false,
|
| 362 |
+
"rstrip": false,
|
| 363 |
+
"single_word": false,
|
| 364 |
+
"special": true
|
| 365 |
+
},
|
| 366 |
+
"49151": {
|
| 367 |
+
"content": " ",
|
| 368 |
+
"lstrip": false,
|
| 369 |
+
"normalized": false,
|
| 370 |
+
"rstrip": false,
|
| 371 |
+
"single_word": false,
|
| 372 |
+
"special": true
|
| 373 |
+
}
|
| 374 |
+
},
|
| 375 |
+
"additional_special_tokens": [
|
| 376 |
+
"<tools>",
|
| 377 |
+
"</tools>",
|
| 378 |
+
"<tool_call>",
|
| 379 |
+
"</tool_call>",
|
| 380 |
+
"<tool_response>",
|
| 381 |
+
"</tool_response>",
|
| 382 |
+
"<think>",
|
| 383 |
+
"</think>",
|
| 384 |
+
"<answer>",
|
| 385 |
+
"</answer>",
|
| 386 |
+
"<context>",
|
| 387 |
+
"</context>",
|
| 388 |
+
"<|fim_prefix|>",
|
| 389 |
+
"<|fim_suffix|>",
|
| 390 |
+
"<|fim_middle|>",
|
| 391 |
+
"<|repo_name|>",
|
| 392 |
+
"<|image|>",
|
| 393 |
+
"<|image_pad|>",
|
| 394 |
+
"<|image_placeholder|>",
|
| 395 |
+
" ",
|
| 396 |
+
" ",
|
| 397 |
+
" ",
|
| 398 |
+
" ",
|
| 399 |
+
" ",
|
| 400 |
+
" ",
|
| 401 |
+
" ",
|
| 402 |
+
" ",
|
| 403 |
+
" ",
|
| 404 |
+
" ",
|
| 405 |
+
" ",
|
| 406 |
+
" ",
|
| 407 |
+
" ",
|
| 408 |
+
" ",
|
| 409 |
+
" ",
|
| 410 |
+
" ",
|
| 411 |
+
" ",
|
| 412 |
+
" ",
|
| 413 |
+
" ",
|
| 414 |
+
" ",
|
| 415 |
+
" ",
|
| 416 |
+
" ",
|
| 417 |
+
" "
|
| 418 |
+
],
|
| 419 |
+
"bos_token": "<|im_start|>",
|
| 420 |
+
"bos_token_id": 1,
|
| 421 |
+
"clean_up_tokenization_spaces": false,
|
| 422 |
+
"eos_token": "<|im_end|>",
|
| 423 |
+
"eos_token_id": 2,
|
| 424 |
+
"extra_special_tokens": {},
|
| 425 |
+
"legacy": false,
|
| 426 |
+
"model_max_length": 1000000000000000019884624838656,
|
| 427 |
+
"pad_token": "<|pad|>",
|
| 428 |
+
"pad_token_id": 49109,
|
| 429 |
+
"padding_side": "right",
|
| 430 |
+
"sp_model_kwargs": {},
|
| 431 |
+
"spaces_between_special_tokens": false,
|
| 432 |
+
"tokenizer_class": "PreTrainedTokenizerFast",
|
| 433 |
+
"unk_token": "<|unk|>",
|
| 434 |
+
"unk_token_id": 0,
|
| 435 |
+
"use_default_system_prompt": false
|
| 436 |
+
}
|
train_logs.parquet
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:4c13993097dba1bca36a48ed2642c6e6b1e506559369eb7d907dce5184b14d9b
|
| 3 |
+
size 3595583
|
training_config.yml
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Directory settings
|
| 2 |
+
checkpoint_dir: "/hindi/checkpoints"
|
| 3 |
+
train_dataset_dir: "/hindi_text_tokenized_all/train"
|
| 4 |
+
val_dataset_dir: "/hindi_text_tokenized_all/val"
|
| 5 |
+
dataset_type: "parquet"
|
| 6 |
+
cache_dir: "hindi/cache"
|
| 7 |
+
|
| 8 |
+
# Data loading settings
|
| 9 |
+
pin_memory: true
|
| 10 |
+
num_workers_for_dataloader: 16
|
| 11 |
+
shuffle_dataset: true
|
| 12 |
+
|
| 13 |
+
# Model architecture settings
|
| 14 |
+
vocab_size: 49152
|
| 15 |
+
num_hidden_layers: 28
|
| 16 |
+
num_attention_heads: 16
|
| 17 |
+
num_key_value_heads: 8
|
| 18 |
+
head_dim: null
|
| 19 |
+
hidden_size: 1536
|
| 20 |
+
intermediate_size: 3072
|
| 21 |
+
max_position_embeddings: 4096
|
| 22 |
+
tie_word_embeddings: true
|
| 23 |
+
hidden_act: "silu"
|
| 24 |
+
output_hidden_states: false
|
| 25 |
+
attn_implementation: "flash_attention_2"
|
| 26 |
+
use_cache: false
|
| 27 |
+
no_rope_layer_interval: null
|
| 28 |
+
rope_theta: 50000.0
|
| 29 |
+
rms_norm_eps: 0.000001
|
| 30 |
+
|
| 31 |
+
# Training settings
|
| 32 |
+
total_batch_size: 2097152
|
| 33 |
+
micro_batch_size: 16
|
| 34 |
+
eval_micro_batch_size: 8
|
| 35 |
+
num_train_epochs: 5
|
| 36 |
+
warmup_steps: 1000
|
| 37 |
+
max_learning_rate: 0.0007
|
| 38 |
+
min_learning_rate: 0.00007
|
| 39 |
+
weight_decay: 0.1
|
| 40 |
+
beta1: 0.9
|
| 41 |
+
beta2: 0.95
|
| 42 |
+
eps: 0.00000001
|
| 43 |
+
lr_decay_type: "cosine"
|
| 44 |
+
lr_decay_iters_coef: 0.9
|
| 45 |
+
seed: 1337
|
| 46 |
+
max_steps: null
|
| 47 |
+
max_grad_norm: 1.0
|
| 48 |
+
|
| 49 |
+
# Precision and optimization settings
|
| 50 |
+
torch_compile: false
|
| 51 |
+
mat_mul_precision: "highest"
|
| 52 |
+
tf32: true
|
| 53 |
+
bf16: true
|
| 54 |
+
gradient_checkpointing: false
|
| 55 |
+
use_liger_kernel: true
|
| 56 |
+
static_graph: false
|
| 57 |
+
|
| 58 |
+
# Hub settings
|
| 59 |
+
push_to_hub: false
|
| 60 |
+
hub_token: null
|
| 61 |
+
hub_model_id: null
|
| 62 |
+
|
| 63 |
+
# Tokenizer and Reference model
|
| 64 |
+
tokenizer_name_or_path: "Polygl0t/LilMoo-v0.1"
|
| 65 |
+
reference_model: "HuggingFaceTB/SmolLM2-360M"
|
| 66 |
+
|
| 67 |
+
# Checkpoint settings
|
| 68 |
+
resume_from_checkpoint: null
|
| 69 |
+
checkpointing_steps: 5000
|
| 70 |
+
begin_new_stage: false
|
| 71 |
+
stage_name: "S1"
|
| 72 |
+
|
| 73 |
+
# Miscellaneous settings
|
| 74 |
+
sanity_check: false
|
| 75 |
+
sanity_check_num_samples: 100000
|
| 76 |
+
wandb_token: null
|
| 77 |
+
wandb_id: "LilMoo-v0.1"
|
| 78 |
+
wandb_project: "Polyglot"
|
| 79 |
+
wandb_desc: "Developing LLMs for low-resource languages"
|
val_logs.parquet
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5998305eb2fa76fb625d5ccc5dce20b6247f66f08c2c23abdc10f8d6808fd9c6
|
| 3 |
+
size 2626
|