Snider Virgil committed · Commit 662a530 · 0 Parent(s)

feat: LEK-merged multimodal MLX Q4 (On-device default, 4.1 GB)


First clean commit after orphan rewrite — previous history was inherited
from a mis-cloned lthn/lemer main repo and referenced LFS blobs that
don't exist in this sibling repo's storage.

This repo contains:
- Gemma 4 E2B with Lethean Ethical Kernel (LEK) merged into 100 text
attention projections (q/k/v/o_proj) via LoRA finetune, then folded
into the base weights
- Native MLX Q4 conversion via mlx_vlm.convert
- Full multimodal support (text, image, audio) with vision and audio
towers preserved unmodified from Google's upstream Gemma 4 E2B
- Generation config with Google's calibrated sampling defaults
(temperature=1.0, top_p=0.95, top_k=64)
- Model card documenting variant-specific sizing, sibling repos, MLX
Quick Start (mlx-lm, mlx-vlm, mlx_vlm.server), EUPL-1.2 licensing

Base fork at LetheanNetwork/lemer remains the unmodified Google fork
for users who want raw bf16 without the LEK shift. Full Lemma family
documentation at https://huggingface.co/lthn/lemer.

Co-Authored-By: Virgil <virgil@lethean.io>

.gitattributes ADDED
@@ -0,0 +1,36 @@
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
LICENSE ADDED
@@ -0,0 +1,287 @@
1
+ EUROPEAN UNION PUBLIC LICENCE v. 1.2
2
+ EUPL © the European Union 2007, 2016
3
+
4
+ This European Union Public Licence (the ‘EUPL’) applies to the Work (as defined
5
+ below) which is provided under the terms of this Licence. Any use of the Work,
6
+ other than as authorised under this Licence is prohibited (to the extent such
7
+ use is covered by a right of the copyright holder of the Work).
8
+
9
+ The Work is provided under the terms of this Licence when the Licensor (as
10
+ defined below) has placed the following notice immediately following the
11
+ copyright notice for the Work:
12
+
13
+ Licensed under the EUPL
14
+
15
+ or has expressed by any other means his willingness to license under the EUPL.
16
+
17
+ 1. Definitions
18
+
19
+ In this Licence, the following terms have the following meaning:
20
+
21
+ - ‘The Licence’: this Licence.
22
+
23
+ - ‘The Original Work’: the work or software distributed or communicated by the
24
+ Licensor under this Licence, available as Source Code and also as Executable
25
+ Code as the case may be.
26
+
27
+ - ‘Derivative Works’: the works or software that could be created by the
28
+ Licensee, based upon the Original Work or modifications thereof. This Licence
29
+ does not define the extent of modification or dependence on the Original Work
30
+ required in order to classify a work as a Derivative Work; this extent is
31
+ determined by copyright law applicable in the country mentioned in Article 15.
32
+
33
+ - ‘The Work’: the Original Work or its Derivative Works.
34
+
35
+ - ‘The Source Code’: the human-readable form of the Work which is the most
36
+ convenient for people to study and modify.
37
+
38
+ - ‘The Executable Code’: any code which has generally been compiled and which is
39
+ meant to be interpreted by a computer as a program.
40
+
41
+ - ‘The Licensor’: the natural or legal person that distributes or communicates
42
+ the Work under the Licence.
43
+
44
+ - ‘Contributor(s)’: any natural or legal person who modifies the Work under the
45
+ Licence, or otherwise contributes to the creation of a Derivative Work.
46
+
47
+ - ‘The Licensee’ or ‘You’: any natural or legal person who makes any usage of
48
+ the Work under the terms of the Licence.
49
+
50
+ - ‘Distribution’ or ‘Communication’: any act of selling, giving, lending,
51
+ renting, distributing, communicating, transmitting, or otherwise making
52
+ available, online or offline, copies of the Work or providing access to its
53
+ essential functionalities at the disposal of any other natural or legal
54
+ person.
55
+
56
+ 2. Scope of the rights granted by the Licence
57
+
58
+ The Licensor hereby grants You a worldwide, royalty-free, non-exclusive,
59
+ sublicensable licence to do the following, for the duration of copyright vested
60
+ in the Original Work:
61
+
62
+ - use the Work in any circumstance and for all usage,
63
+ - reproduce the Work,
64
+ - modify the Work, and make Derivative Works based upon the Work,
65
+ - communicate to the public, including the right to make available or display
66
+ the Work or copies thereof to the public and perform publicly, as the case may
67
+ be, the Work,
68
+ - distribute the Work or copies thereof,
69
+ - lend and rent the Work or copies thereof,
70
+ - sublicense rights in the Work or copies thereof.
71
+
72
+ Those rights can be exercised on any media, supports and formats, whether now
73
+ known or later invented, as far as the applicable law permits so.
74
+
75
+ In the countries where moral rights apply, the Licensor waives his right to
76
+ exercise his moral right to the extent allowed by law in order to make effective
77
+ the licence of the economic rights here above listed.
78
+
79
+ The Licensor grants to the Licensee royalty-free, non-exclusive usage rights to
80
+ any patents held by the Licensor, to the extent necessary to make use of the
81
+ rights granted on the Work under this Licence.
82
+
83
+ 3. Communication of the Source Code
84
+
85
+ The Licensor may provide the Work either in its Source Code form, or as
86
+ Executable Code. If the Work is provided as Executable Code, the Licensor
87
+ provides in addition a machine-readable copy of the Source Code of the Work
88
+ along with each copy of the Work that the Licensor distributes or indicates, in
89
+ a notice following the copyright notice attached to the Work, a repository where
90
+ the Source Code is easily and freely accessible for as long as the Licensor
91
+ continues to distribute or communicate the Work.
92
+
93
+ 4. Limitations on copyright
94
+
95
+ Nothing in this Licence is intended to deprive the Licensee of the benefits from
96
+ any exception or limitation to the exclusive rights of the rights owners in the
97
+ Work, of the exhaustion of those rights or of other applicable limitations
98
+ thereto.
99
+
100
+ 5. Obligations of the Licensee
101
+
102
+ The grant of the rights mentioned above is subject to some restrictions and
103
+ obligations imposed on the Licensee. Those obligations are the following:
104
+
105
+ Attribution right: The Licensee shall keep intact all copyright, patent or
106
+ trademarks notices and all notices that refer to the Licence and to the
107
+ disclaimer of warranties. The Licensee must include a copy of such notices and a
108
+ copy of the Licence with every copy of the Work he/she distributes or
109
+ communicates. The Licensee must cause any Derivative Work to carry prominent
110
+ notices stating that the Work has been modified and the date of modification.
111
+
112
+ Copyleft clause: If the Licensee distributes or communicates copies of the
113
+ Original Works or Derivative Works, this Distribution or Communication will be
114
+ done under the terms of this Licence or of a later version of this Licence
115
+ unless the Original Work is expressly distributed only under this version of the
116
+ Licence — for example by communicating ‘EUPL v. 1.2 only’. The Licensee
117
+ (becoming Licensor) cannot offer or impose any additional terms or conditions on
118
+ the Work or Derivative Work that alter or restrict the terms of the Licence.
119
+
120
+ Compatibility clause: If the Licensee Distributes or Communicates Derivative
121
+ Works or copies thereof based upon both the Work and another work licensed under
122
+ a Compatible Licence, this Distribution or Communication can be done under the
123
+ terms of this Compatible Licence. For the sake of this clause, ‘Compatible
124
+ Licence’ refers to the licences listed in the appendix attached to this Licence.
125
+ Should the Licensee's obligations under the Compatible Licence conflict with
126
+ his/her obligations under this Licence, the obligations of the Compatible
127
+ Licence shall prevail.
128
+
129
+ Provision of Source Code: When distributing or communicating copies of the Work,
130
+ the Licensee will provide a machine-readable copy of the Source Code or indicate
131
+ a repository where this Source will be easily and freely available for as long
132
+ as the Licensee continues to distribute or communicate the Work.
133
+
134
+ Legal Protection: This Licence does not grant permission to use the trade names,
135
+ trademarks, service marks, or names of the Licensor, except as required for
136
+ reasonable and customary use in describing the origin of the Work and
137
+ reproducing the content of the copyright notice.
138
+
139
+ 6. Chain of Authorship
140
+
141
+ The original Licensor warrants that the copyright in the Original Work granted
142
+ hereunder is owned by him/her or licensed to him/her and that he/she has the
143
+ power and authority to grant the Licence.
144
+
145
+ Each Contributor warrants that the copyright in the modifications he/she brings
146
+ to the Work are owned by him/her or licensed to him/her and that he/she has the
147
+ power and authority to grant the Licence.
148
+
149
+ Each time You accept the Licence, the original Licensor and subsequent
150
+ Contributors grant You a licence to their contributions to the Work, under the
151
+ terms of this Licence.
152
+
153
+ 7. Disclaimer of Warranty
154
+
155
+ The Work is a work in progress, which is continuously improved by numerous
156
+ Contributors. It is not a finished work and may therefore contain defects or
157
+ ‘bugs’ inherent to this type of development.
158
+
159
+ For the above reason, the Work is provided under the Licence on an ‘as is’ basis
160
+ and without warranties of any kind concerning the Work, including without
161
+ limitation merchantability, fitness for a particular purpose, absence of defects
162
+ or errors, accuracy, non-infringement of intellectual property rights other than
163
+ copyright as stated in Article 6 of this Licence.
164
+
165
+ This disclaimer of warranty is an essential part of the Licence and a condition
166
+ for the grant of any rights to the Work.
167
+
168
+ 8. Disclaimer of Liability
169
+
170
+ Except in the cases of wilful misconduct or damages directly caused to natural
171
+ persons, the Licensor will in no event be liable for any direct or indirect,
172
+ material or moral, damages of any kind, arising out of the Licence or of the use
173
+ of the Work, including without limitation, damages for loss of goodwill, work
174
+ stoppage, computer failure or malfunction, loss of data or any commercial
175
+ damage, even if the Licensor has been advised of the possibility of such damage.
176
+ However, the Licensor will be liable under statutory product liability laws as
177
+ far such laws apply to the Work.
178
+
179
+ 9. Additional agreements
180
+
181
+ While distributing the Work, You may choose to conclude an additional agreement,
182
+ defining obligations or services consistent with this Licence. However, if
183
+ accepting obligations, You may act only on your own behalf and on your sole
184
+ responsibility, not on behalf of the original Licensor or any other Contributor,
185
+ and only if You agree to indemnify, defend, and hold each Contributor harmless
186
+ for any liability incurred by, or claims asserted against such Contributor by
187
+ the fact You have accepted any warranty or additional liability.
188
+
189
+ 10. Acceptance of the Licence
190
+
191
+ The provisions of this Licence can be accepted by clicking on an icon ‘I agree’
192
+ placed under the bottom of a window displaying the text of this Licence or by
193
+ affirming consent in any other similar way, in accordance with the rules of
194
+ applicable law. Clicking on that icon indicates your clear and irrevocable
195
+ acceptance of this Licence and all of its terms and conditions.
196
+
197
+ Similarly, you irrevocably accept this Licence and all of its terms and
198
+ conditions by exercising any rights granted to You by Article 2 of this Licence,
199
+ such as the use of the Work, the creation by You of a Derivative Work or the
200
+ Distribution or Communication by You of the Work or copies thereof.
201
+
202
+ 11. Information to the public
203
+
204
+ In case of any Distribution or Communication of the Work by means of electronic
205
+ communication by You (for example, by offering to download the Work from a
206
+ remote location) the distribution channel or media (for example, a website) must
207
+ at least provide to the public the information requested by the applicable law
208
+ regarding the Licensor, the Licence and the way it may be accessible, concluded,
209
+ stored and reproduced by the Licensee.
210
+
211
+ 12. Termination of the Licence
212
+
213
+ The Licence and the rights granted hereunder will terminate automatically upon
214
+ any breach by the Licensee of the terms of the Licence.
215
+
216
+ Such a termination will not terminate the licences of any person who has
217
+ received the Work from the Licensee under the Licence, provided such persons
218
+ remain in full compliance with the Licence.
219
+
220
+ 13. Miscellaneous
221
+
222
+ Without prejudice of Article 9 above, the Licence represents the complete
223
+ agreement between the Parties as to the Work.
224
+
225
+ If any provision of the Licence is invalid or unenforceable under applicable
226
+ law, this will not affect the validity or enforceability of the Licence as a
227
+ whole. Such provision will be construed or reformed so as necessary to make it
228
+ valid and enforceable.
229
+
230
+ The European Commission may publish other linguistic versions or new versions of
231
+ this Licence or updated versions of the Appendix, so far this is required and
232
+ reasonable, without reducing the scope of the rights granted by the Licence. New
233
+ versions of the Licence will be published with a unique version number.
234
+
235
+ All linguistic versions of this Licence, approved by the European Commission,
236
+ have identical value. Parties can take advantage of the linguistic version of
237
+ their choice.
238
+
239
+ 14. Jurisdiction
240
+
241
+ Without prejudice to specific agreement between parties,
242
+
243
+ - any litigation resulting from the interpretation of this License, arising
244
+ between the European Union institutions, bodies, offices or agencies, as a
245
+ Licensor, and any Licensee, will be subject to the jurisdiction of the Court
246
+ of Justice of the European Union, as laid down in article 272 of the Treaty on
247
+ the Functioning of the European Union,
248
+
249
+ - any litigation arising between other parties and resulting from the
250
+ interpretation of this License, will be subject to the exclusive jurisdiction
251
+ of the competent court where the Licensor resides or conducts its primary
252
+ business.
253
+
254
+ 15. Applicable Law
255
+
256
+ Without prejudice to specific agreement between parties,
257
+
258
+ - this Licence shall be governed by the law of the European Union Member State
259
+ where the Licensor has his seat, resides or has his registered office,
260
+
261
+ - this licence shall be governed by Belgian law if the Licensor has no seat,
262
+ residence or registered office inside a European Union Member State.
263
+
264
+ Appendix
265
+
266
+ ‘Compatible Licences’ according to Article 5 EUPL are:
267
+
268
+ - GNU General Public License (GPL) v. 2, v. 3
269
+ - GNU Affero General Public License (AGPL) v. 3
270
+ - Open Software License (OSL) v. 2.1, v. 3.0
271
+ - Eclipse Public License (EPL) v. 1.0
272
+ - CeCILL v. 2.0, v. 2.1
273
+ - Mozilla Public Licence (MPL) v. 2
274
+ - GNU Lesser General Public Licence (LGPL) v. 2.1, v. 3
275
+ - Creative Commons Attribution-ShareAlike v. 3.0 Unported (CC BY-SA 3.0) for
276
+ works other than software
277
+ - European Union Public Licence (EUPL) v. 1.1, v. 1.2
278
+ - Québec Free and Open-Source Licence — Reciprocity (LiLiQ-R) or Strong
279
+ Reciprocity (LiLiQ-R+).
280
+
281
+ The European Commission may update this Appendix to later versions of the above
282
+ licences without producing a new version of the EUPL, as long as they provide
283
+ the rights granted in Article 2 of this Licence and protect the covered Source
284
+ Code from exclusive appropriation.
285
+
286
+ All other changes or additions to this Appendix require the production of a new
287
+ EUPL version.
README.md ADDED
@@ -0,0 +1,159 @@
1
+ ---
2
+ license: eupl-1.2
3
+ pipeline_tag: image-text-to-text
4
+ library_name: mlx
5
+ base_model:
6
+ - LetheanNetwork/lemer
7
+ base_model_relation: quantized
8
+ tags:
9
+ - gemma4
10
+ - gemma
11
+ - mlx
12
+ - mlx-q4
13
+ - safetensors
14
+ - quantized
15
+ - multimodal
16
+ - vision
17
+ - audio
18
+ - lemma
19
+ - lethean
20
+ - lem
21
+ - lek
22
+ - apple-silicon
23
+ - on-device
24
+ - conversational
25
+ ---
26
+ <!--
27
+ This content is subject to the European Union Public Licence (EUPL-1.2).
28
+ For full licence details, please refer to: https://huggingface.co/lthn/lemer-mlx/tree/main/LICENSE
29
+ Origin URL: https://huggingface.co/lthn/lemer-mlx/tree/main
30
+ -->
31
+ # Lemer (MLX Q4) — Gemma 4 E2B + LEK
32
+
33
+ **On-device default MLX 4-bit quantised build of [lemer](https://huggingface.co/lthn/lemer)** — Gemma 4 E2B with the Lethean Ethical Kernel (LEK) merged into the text attention weights, quantised to 4 bits per weight via `mlx-vlm`'s native quantisation (affine mode, group size 64). Full multimodal support preserved (text, image, audio). Effective rate: **6.851 bits per weight average** (embeddings and sensitive layers kept at higher precision). This is the **default on-device variant** — smallest footprint, fastest inference, best for consumer Apple Silicon.
34
+
35
+ **Other formats in the Lemma family:**
36
+
37
+ | Repo | Format | Size | Use case |
38
+ |---|---|---|---|
39
+ | [lthn/lemer](https://huggingface.co/lthn/lemer) | HF + GGUF + MLX Q4 bundled | 3–9 GB per variant | Main consumer repo — everything in one place |
40
+ | [lthn/lemer-mlx-bf16](https://huggingface.co/lthn/lemer-mlx-bf16) | MLX BF16 | 10.2 GB | Full-precision reference |
41
+ | [lthn/lemer-mlx-q8](https://huggingface.co/lthn/lemer-mlx-q8) | MLX Q8 | 5.9 GB | Near-lossless quantised |
42
+ | [lthn/lemer-mlx](https://huggingface.co/lthn/lemer-mlx) | MLX Q4 | **4.1 GB** | **You are here** — on-device default |
43
+ | [LetheanNetwork/lemer](https://huggingface.co/LetheanNetwork/lemer) | HF BF16 (unmodified base) | 10.2 GB | Raw Google Gemma 4 E2B fork, no LEK |
44
+
45
+ ## What This Is
46
+
47
+ The **Lethean Ethical Kernel (LEK)** has been merged directly into the text attention projections (100 `q/k/v/o_proj` layers) of Gemma 4 E2B via a LoRA finetune, then folded into the base weights. The vision tower and audio tower are preserved unmodified from Google's upstream — LEK only shifts text reasoning.
48
+
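+ For readers who want the arithmetic behind "folded into the base weights": the merge is the standard LoRA fold-in, where the scaled low-rank delta is added onto the frozen projection so the shipped checkpoint carries no separate adapter. A minimal sketch follows; only the rank (8) comes from the model card, while the dimensions, `alpha`, and the random matrices are illustrative placeholders (the LEK training code itself is not part of this repo).
+
+ ```python
+ import numpy as np
+
+ # Standard LoRA fold-in: W_merged = W + (alpha / r) * (B @ A)
+ # r=8 matches the model card; d_model and alpha are illustrative only.
+ d_model, r, alpha = 2048, 8, 16.0
+
+ W = np.random.randn(d_model, d_model).astype(np.float32)   # base q/k/v/o_proj weight
+ A = np.random.randn(r, d_model).astype(np.float32) * 0.01  # trained LoRA "down" matrix
+ B = np.random.randn(d_model, r).astype(np.float32) * 0.01  # trained LoRA "up" matrix
+
+ W_merged = W + (alpha / r) * (B @ A)  # folded weight, later quantised to Q4
+ ```
+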
49
+ This variant is **MLX Q4 quantised from the merged model** — the smallest, fastest multimodal Lemma variant, suitable for on-device inference on consumer Apple Silicon. Single `model.safetensors` file, ~4.1 GB. Quantisation is 4 bits for the attention projections, with the MLP layers held at 8 bits (per the recipe recorded in `config.json`) and embeddings kept at higher precision, hence the 6.851 bits/weight average. Verified on M3 Ultra at **145+ tokens/sec generation** via `mlx-lm`; vision inference against COCO sample images via `mlx-vlm` produced accurate descriptions.
50
+
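+ For reference, this is roughly how the Q4 build is produced from the BF16 source repo using `mlx_vlm.convert`, the same call recorded in the Model Details table below. The output directory name is a placeholder, and keyword names may differ slightly between mlx-vlm releases, so check your installed version's `convert()` signature.
+
+ ```python
+ from mlx_vlm import convert
+
+ # Quantise the merged BF16 weights to MLX Q4 (affine, group size 64).
+ # See config.json in this repo for the exact per-layer recipe that shipped.
+ convert(
+     "lthn/lemer-mlx-bf16",    # quantisation source (see Model Details)
+     mlx_path="lemer-mlx-q4",  # local output directory (placeholder)
+     quantize=True,
+     q_bits=4,
+     q_group_size=64,
+ )
+ ```
+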
51
+ Use this variant when:
52
+ - You want the default on-device Lemma experience
53
+ - You're running on consumer Apple Silicon (M1/M2/M3 base, Air, Pro, Studio)
54
+ - You need the fastest inference with acceptable quality
55
+ - Memory budget is limited (~5 GB runtime peak)
56
+
57
+ For higher fidelity, use [lemer-mlx-q8](https://huggingface.co/lthn/lemer-mlx-q8) at 5.9 GB or [lemer-mlx-bf16](https://huggingface.co/lthn/lemer-mlx-bf16) at 10.2 GB.
58
+
59
+ ## Quick Start
60
+
61
+ ### mlx-lm (text)
62
+
63
+ ```bash
64
+ uv tool install mlx-lm
65
+ mlx_lm.chat --model lthn/lemer-mlx
66
+ mlx_lm.generate --model lthn/lemer-mlx --prompt "Hello, how are you?"
67
+ ```
68
+
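+ The same text path is available from Python through mlx-lm's `load`/`generate` helpers. A minimal sketch, assuming a recent mlx-lm release (the prompt and token budget are arbitrary):
+
+ ```python
+ from mlx_lm import load, generate
+
+ model, tokenizer = load("lthn/lemer-mlx")
+
+ # Format the request with the bundled chat template.
+ messages = [{"role": "user", "content": "Hello, how are you?"}]
+ prompt = tokenizer.apply_chat_template(
+     messages, add_generation_prompt=True, tokenize=False
+ )
+
+ text = generate(model, tokenizer, prompt=prompt, max_tokens=256, verbose=True)
+ print(text)
+ ```
+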
69
+ ### mlx-vlm (vision + audio multimodal)
70
+
71
+ ```bash
72
+ uv tool install mlx-vlm
73
+ ```
74
+
75
+ ```python
76
+ from mlx_vlm import load, generate
77
+ from mlx_vlm.prompt_utils import apply_chat_template
78
+ from mlx_vlm.utils import load_config
79
+
80
+ model, processor = load("lthn/lemer-mlx")
81
+ config = load_config("lthn/lemer-mlx")
82
+
83
+ image = ["http://images.cocodataset.org/val2017/000000039769.jpg"]
84
+ prompt = "Describe this image in one sentence."
85
+
86
+ formatted_prompt = apply_chat_template(
87
+ processor, config, prompt, num_images=1
88
+ )
89
+
90
+ output = generate(model, processor, formatted_prompt, image)
91
+ print(output.text)
92
+ ```
93
+
94
+ ### mlx-vlm server (OpenAI-compatible API)
95
+
96
+ ```bash
97
+ mlx_vlm.server --model lthn/lemer-mlx --port 8080
98
+ ```
99
+
100
+ Any OpenAI-compatible client can then hit `http://localhost:8080/v1/chat/completions`, including LM Studio, pi-coding-agent, OpenWebUI, and similar tooling.
101
+
102
+ > **Note**: use `mlx_vlm.server` (not `mlx_lm.server`) because lemer is multimodal. The text-only `mlx_lm.server` does not correctly route the vision/audio tensors for Gemma 4.
103
+
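+ For a quick smoke test without installing a separate client, the standard chat-completions request shape works from the Python standard library. The field names follow the OpenAI convention; the exact set of parameters honoured by `mlx_vlm.server` may vary by version:
+
+ ```python
+ import json
+ import urllib.request
+
+ payload = {
+     "model": "lthn/lemer-mlx",
+     "messages": [{"role": "user", "content": "Describe LEK in one sentence."}],
+     "temperature": 1.0,
+     "top_p": 0.95,
+ }
+ req = urllib.request.Request(
+     "http://localhost:8080/v1/chat/completions",
+     data=json.dumps(payload).encode("utf-8"),
+     headers={"Content-Type": "application/json"},
+ )
+ with urllib.request.urlopen(req) as resp:
+     body = json.load(resp)
+
+ print(body["choices"][0]["message"]["content"])
+ ```
+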
104
+ ## Recommended Sampling
105
+
106
+ Per Google's [Gemma 4 model card](https://huggingface.co/google/gemma-4-E2B-it), use these settings for all use cases. **Gemma 4 is calibrated for `temperature=1.0` — greedy / temperature=0 is NOT recommended and will measurably underperform.**
107
+
108
+ | Parameter | Value |
109
+ |-----------|-------|
110
+ | `temperature` | 1.0 |
111
+ | `top_p` | 0.95 |
112
+ | `top_k` | 64 |
113
+
114
+ Already set in `generation_config.json`.
115
+
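+ If you drive generation from Python instead of relying on `generation_config.json`, the same values can be passed explicitly. A sketch assuming a recent mlx-lm, where sampling is configured through `make_sampler`; treat the parameter names as indicative, since they have moved between releases:
+
+ ```python
+ from mlx_lm import load, generate
+ from mlx_lm.sample_utils import make_sampler
+
+ model, tokenizer = load("lthn/lemer-mlx")
+
+ # Google's calibrated Gemma 4 defaults, applied explicitly.
+ sampler = make_sampler(temp=1.0, top_p=0.95, top_k=64)
+
+ print(generate(model, tokenizer, prompt="Why is the sky blue?",
+                sampler=sampler, max_tokens=128))
+ ```
+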
116
+ ## Model Details
117
+
118
+ | Property | Value |
119
+ |----------|-------|
120
+ | **Architecture** | Gemma 4 E2B |
121
+ | **Format** | MLX Q4 (affine quantisation) |
122
+ | **Quantisation bits** | 4 (6.851 bits/weight average including full-precision layers) |
123
+ | **Quantisation group size** | 64 |
124
+ | **Parameters** | 5.1B total, 2.3B effective (Per-Layer Embeddings) |
125
+ | **Layers** | 35 text decoder layers |
126
+ | **Context Length** | 128K tokens |
127
+ | **Vocabulary** | 262K tokens |
128
+ | **Modalities** | Text, Image, Audio |
129
+ | **Vision Encoder** | ~150M params (preserved unmodified from Google) |
130
+ | **Audio Encoder** | ~300M params (preserved unmodified from Google) |
131
+ | **Weight file** | Single `model.safetensors` (~4.1 GB) |
132
+ | **LEK delta** | LoRA rank 8 merged into 100 text attention projections, then quantised |
133
+ | **Quantisation source** | [lthn/lemer-mlx-bf16](https://huggingface.co/lthn/lemer-mlx-bf16) via `mlx_vlm.convert(quantize=True, q_bits=4, q_group_size=64)` |
134
+ | **Base fork** | [LetheanNetwork/lemer](https://huggingface.co/LetheanNetwork/lemer) (unmodified Google fork) |
135
+ | **Licence** | EUPL-1.2 |
136
+
137
+ ## Performance Notes
138
+
139
+ Verified on M3 Ultra (96 GB):
140
+ - **mlx-lm generation**: ~145 tokens/sec on text-only inference
141
+ - **Peak runtime memory**: ~3.4 GB (ample headroom for context growth)
142
+ - **Vision inference**: correct multi-object scene description on COCO test images
143
+
144
+ This Q4 build should run comfortably on an 8 GB M1/M2/M3/M4 Air for text inference, and on Pro/Max/Ultra machines for full multimodal workloads.
145
+
146
+ ## Full Model Card
147
+
148
+ Detailed documentation — Lemma family overview, GGUF variants, capability map, benchmarks, the "why EUPL-1.2" framing, and the Roadmap — lives on the main repo:
149
+
150
+ **→ [lthn/lemer](https://huggingface.co/lthn/lemer)**
151
+
152
+ ## About Lethean
153
+
154
+ [Lethean](https://lthn.ai) is a social enterprise building ethical AI infrastructure. The Lemma model family is part of the [LEM (Lethean Ethical Model)](https://github.com/LetheanNetwork) project — training protocol and tooling for intrinsic ethical alignment of language models via consent-based LoRA finetunes, shipped EUPL-1.2 so the ethical layer stays in the open.
155
+
156
+ - Website: [lthn.ai](https://lthn.ai)
157
+ - GitHub: [LetheanNetwork](https://github.com/LetheanNetwork)
158
+ - Axioms (public domain): [Snider/ai-ethics](https://github.com/Snider/ai-ethics)
159
+ - Licence: [EUPL-1.2](https://joinup.ec.europa.eu/collection/eupl/eupl-text-eupl-12)
chat_template.jinja ADDED
@@ -0,0 +1,344 @@
1
+ {%- macro format_parameters(properties, required) -%}
2
+ {%- set standard_keys = ['description', 'type', 'properties', 'required', 'nullable'] -%}
3
+ {%- set ns = namespace(found_first=false) -%}
4
+ {%- for key, value in properties | dictsort -%}
5
+ {%- set add_comma = false -%}
6
+ {%- if key not in standard_keys -%}
7
+ {%- if ns.found_first %},{% endif -%}
8
+ {%- set ns.found_first = true -%}
9
+ {{ key }}:{
10
+ {%- if value['description'] -%}
11
+ description:<|"|>{{ value['description'] }}<|"|>
12
+ {%- set add_comma = true -%}
13
+ {%- endif -%}
14
+ {%- if value['type'] | upper == 'STRING' -%}
15
+ {%- if value['enum'] -%}
16
+ {%- if add_comma %},{%- else -%} {%- set add_comma = true -%} {% endif -%}
17
+ enum:{{ format_argument(value['enum']) }}
18
+ {%- endif -%}
19
+ {%- elif value['type'] | upper == 'ARRAY' -%}
20
+ {%- if value['items'] is mapping and value['items'] -%}
21
+ {%- if add_comma %},{%- else -%} {%- set add_comma = true -%} {% endif -%}
22
+ items:{
23
+ {%- set ns_items = namespace(found_first=false) -%}
24
+ {%- for item_key, item_value in value['items'] | dictsort -%}
25
+ {%- if item_value is not none -%}
26
+ {%- if ns_items.found_first %},{% endif -%}
27
+ {%- set ns_items.found_first = true -%}
28
+ {%- if item_key == 'properties' -%}
29
+ properties:{
30
+ {%- if item_value is mapping -%}
31
+ {{- format_parameters(item_value, value['items']['required'] | default([])) -}}
32
+ {%- endif -%}
33
+ }
34
+ {%- elif item_key == 'required' -%}
35
+ required:[
36
+ {%- for req_item in item_value -%}
37
+ <|"|>{{- req_item -}}<|"|>
38
+ {%- if not loop.last %},{% endif -%}
39
+ {%- endfor -%}
40
+ ]
41
+ {%- elif item_key == 'type' -%}
42
+ {%- if item_value is string -%}
43
+ type:{{ format_argument(item_value | upper) }}
44
+ {%- else -%}
45
+ type:{{ format_argument(item_value | map('upper') | list) }}
46
+ {%- endif -%}
47
+ {%- else -%}
48
+ {{ item_key }}:{{ format_argument(item_value) }}
49
+ {%- endif -%}
50
+ {%- endif -%}
51
+ {%- endfor -%}
52
+ }
53
+ {%- endif -%}
54
+ {%- endif -%}
55
+ {%- if value['nullable'] %}
56
+ {%- if add_comma %},{%- else -%} {%- set add_comma = true -%} {% endif -%}
57
+ nullable:true
58
+ {%- endif -%}
59
+ {%- if value['type'] | upper == 'OBJECT' -%}
60
+ {%- if value['properties'] is defined and value['properties'] is mapping -%}
61
+ {%- if add_comma %},{%- else -%} {%- set add_comma = true -%} {% endif -%}
62
+ properties:{
63
+ {{- format_parameters(value['properties'], value['required'] | default([])) -}}
64
+ }
65
+ {%- elif value is mapping -%}
66
+ {%- if add_comma %},{%- else -%} {%- set add_comma = true -%} {% endif -%}
67
+ properties:{
68
+ {{- format_parameters(value, value['required'] | default([])) -}}
69
+ }
70
+ {%- endif -%}
71
+ {%- if value['required'] -%}
72
+ {%- if add_comma %},{%- else -%} {%- set add_comma = true -%} {% endif -%}
73
+ required:[
74
+ {%- for item in value['required'] | default([]) -%}
75
+ <|"|>{{- item -}}<|"|>
76
+ {%- if not loop.last %},{% endif -%}
77
+ {%- endfor -%}
78
+ ]
79
+ {%- endif -%}
80
+ {%- endif -%}
81
+ {%- if add_comma %},{%- else -%} {%- set add_comma = true -%} {% endif -%}
82
+ type:<|"|>{{ value['type'] | upper }}<|"|>}
83
+ {%- endif -%}
84
+ {%- endfor -%}
85
+ {%- endmacro -%}
86
+ {%- macro format_function_declaration(tool_data) -%}
87
+ declaration:{{- tool_data['function']['name'] -}}{description:<|"|>{{- tool_data['function']['description'] -}}<|"|>
88
+ {%- set params = tool_data['function']['parameters'] -%}
89
+ {%- if params -%}
90
+ ,parameters:{
91
+ {%- if params['properties'] -%}
92
+ properties:{ {{- format_parameters(params['properties'], params['required']) -}} },
93
+ {%- endif -%}
94
+ {%- if params['required'] -%}
95
+ required:[
96
+ {%- for item in params['required'] -%}
97
+ <|"|>{{- item -}}<|"|>
98
+ {{- ',' if not loop.last -}}
99
+ {%- endfor -%}
100
+ ],
101
+ {%- endif -%}
102
+ {%- if params['type'] -%}
103
+ type:<|"|>{{- params['type'] | upper -}}<|"|>}
104
+ {%- endif -%}
105
+ {%- endif -%}
106
+ {%- if 'response' in tool_data['function'] -%}
107
+ {%- set response_declaration = tool_data['function']['response'] -%}
108
+ ,response:{
109
+ {%- if response_declaration['description'] -%}
110
+ description:<|"|>{{- response_declaration['description'] -}}<|"|>,
111
+ {%- endif -%}
112
+ {%- if response_declaration['type'] | upper == 'OBJECT' -%}
113
+ type:<|"|>{{- response_declaration['type'] | upper -}}<|"|>}
114
+ {%- endif -%}
115
+ {%- endif -%}
116
+ }
117
+ {%- endmacro -%}
118
+ {%- macro format_argument(argument, escape_keys=True) -%}
119
+ {%- if argument is string -%}
120
+ {{- '<|"|>' + argument + '<|"|>' -}}
121
+ {%- elif argument is boolean -%}
122
+ {{- 'true' if argument else 'false' -}}
123
+ {%- elif argument is mapping -%}
124
+ {{- '{' -}}
125
+ {%- set ns = namespace(found_first=false) -%}
126
+ {%- for key, value in argument | dictsort -%}
127
+ {%- if ns.found_first %},{% endif -%}
128
+ {%- set ns.found_first = true -%}
129
+ {%- if escape_keys -%}
130
+ {{- '<|"|>' + key + '<|"|>' -}}
131
+ {%- else -%}
132
+ {{- key -}}
133
+ {%- endif -%}
134
+ :{{- format_argument(value, escape_keys=escape_keys) -}}
135
+ {%- endfor -%}
136
+ {{- '}' -}}
137
+ {%- elif argument is sequence -%}
138
+ {{- '[' -}}
139
+ {%- for item in argument -%}
140
+ {{- format_argument(item, escape_keys=escape_keys) -}}
141
+ {%- if not loop.last %},{% endif -%}
142
+ {%- endfor -%}
143
+ {{- ']' -}}
144
+ {%- else -%}
145
+ {{- argument -}}
146
+ {%- endif -%}
147
+ {%- endmacro -%}
148
+ {%- macro strip_thinking(text) -%}
149
+ {%- set ns = namespace(result='') -%}
150
+ {%- for part in text.split('<channel|>') -%}
151
+ {%- if '<|channel>' in part -%}
152
+ {%- set ns.result = ns.result + part.split('<|channel>')[0] -%}
153
+ {%- else -%}
154
+ {%- set ns.result = ns.result + part -%}
155
+ {%- endif -%}
156
+ {%- endfor -%}
157
+ {{- ns.result | trim -}}
158
+ {%- endmacro -%}
159
+
160
+ {%- macro format_tool_response_block(tool_name, response) -%}
161
+ {{- '<|tool_response>' -}}
162
+ {%- if response is mapping -%}
163
+ {{- 'response:' + tool_name + '{' -}}
164
+ {%- for key, value in response | dictsort -%}
165
+ {{- key -}}:{{- format_argument(value, escape_keys=False) -}}
166
+ {%- if not loop.last %},{% endif -%}
167
+ {%- endfor -%}
168
+ {{- '}' -}}
169
+ {%- else -%}
170
+ {{- 'response:' + tool_name + '{value:' + format_argument(response, escape_keys=False) + '}' -}}
171
+ {%- endif -%}
172
+ {{- '<tool_response|>' -}}
173
+ {%- endmacro -%}
174
+
175
+ {%- set ns = namespace(prev_message_type=None) -%}
176
+ {%- set loop_messages = messages -%}
177
+ {{- bos_token -}}
178
+ {#- Handle System/Tool Definitions Block -#}
179
+ {%- if (enable_thinking is defined and enable_thinking) or tools or messages[0]['role'] in ['system', 'developer'] -%}
180
+ {{- '<|turn>system\n' -}}
181
+
182
+ {#- Inject Thinking token at the very top of the FIRST system turn -#}
183
+ {%- if enable_thinking is defined and enable_thinking -%}
184
+ {{- '<|think|>\n' -}}
185
+ {%- set ns.prev_message_type = 'think' -%}
186
+ {%- endif -%}
187
+
188
+ {%- if messages[0]['role'] in ['system', 'developer'] -%}
189
+ {{- messages[0]['content'] | trim -}}
190
+ {%- set loop_messages = messages[1:] -%}
191
+ {%- endif -%}
192
+
193
+ {%- if tools -%}
194
+ {%- for tool in tools %}
195
+ {{- '<|tool>' -}}
196
+ {{- format_function_declaration(tool) | trim -}}
197
+ {{- '<tool|>' -}}
198
+ {%- endfor %}
199
+ {%- set ns.prev_message_type = 'tool' -%}
200
+ {%- endif -%}
201
+
202
+ {{- '<turn|>\n' -}}
203
+ {%- endif %}
204
+
205
+ {#- Pre-scan: find last user message index for reasoning guard -#}
206
+ {%- set ns_turn = namespace(last_user_idx=-1) -%}
207
+ {%- for i in range(loop_messages | length) -%}
208
+ {%- if loop_messages[i]['role'] == 'user' -%}
209
+ {%- set ns_turn.last_user_idx = i -%}
210
+ {%- endif -%}
211
+ {%- endfor -%}
212
+
213
+ {#- Loop through messages -#}
214
+ {%- for message in loop_messages -%}
215
+ {%- if message['role'] != 'tool' -%}
216
+ {%- set ns.prev_message_type = None -%}
217
+ {%- set role = 'model' if message['role'] == 'assistant' else message['role'] -%}
218
+ {#- Detect continuation: suppress duplicate <|turn>model when previous non-tool message was also assistant -#}
219
+ {%- set prev_nt = namespace(role=None, found=false) -%}
220
+ {%- if loop.index0 > 0 -%}
221
+ {%- for j in range(loop.index0 - 1, -1, -1) -%}
222
+ {%- if not prev_nt.found -%}
223
+ {%- if loop_messages[j]['role'] != 'tool' -%}
224
+ {%- set prev_nt.role = loop_messages[j]['role'] -%}
225
+ {%- set prev_nt.found = true -%}
226
+ {%- endif -%}
227
+ {%- endif -%}
228
+ {%- endfor -%}
229
+ {%- endif -%}
230
+ {%- set continue_same_model_turn = (role == 'model' and prev_nt.role == 'assistant') -%}
231
+ {%- if not continue_same_model_turn -%}
232
+ {{- '<|turn>' + role + '\n' }}
233
+ {%- endif -%}
234
+
235
+ {#- Render reasoning/reasoning_content as thinking channel -#}
236
+ {%- set thinking_text = message.get('reasoning') or message.get('reasoning_content') -%}
237
+ {%- if thinking_text and loop.index0 > ns_turn.last_user_idx and message.get('tool_calls') -%}
238
+ {{- '<|channel>thought\n' + thinking_text + '\n<channel|>' -}}
239
+ {%- endif -%}
240
+
241
+ {%- if message['tool_calls'] -%}
242
+ {%- for tool_call in message['tool_calls'] -%}
243
+ {%- set function = tool_call['function'] -%}
244
+ {{- '<|tool_call>call:' + function['name'] + '{' -}}
245
+ {%- if function['arguments'] is mapping -%}
246
+ {%- set ns_args = namespace(found_first=false) -%}
247
+ {%- for key, value in function['arguments'] | dictsort -%}
248
+ {%- if ns_args.found_first %},{% endif -%}
249
+ {%- set ns_args.found_first = true -%}
250
+ {{- key -}}:{{- format_argument(value, escape_keys=False) -}}
251
+ {%- endfor -%}
252
+ {%- elif function['arguments'] is string -%}
253
+ {{- function['arguments'] -}}
254
+ {%- endif -%}
255
+ {{- '}<tool_call|>' -}}
256
+ {%- endfor -%}
257
+ {%- set ns.prev_message_type = 'tool_call' -%}
258
+ {%- endif -%}
259
+
260
+ {%- set ns_tr_out = namespace(flag=false) -%}
261
+ {%- if message.get('tool_responses') -%}
262
+ {#- Legacy: tool_responses embedded on the assistant message (Google/Gemma native) -#}
263
+ {%- for tool_response in message['tool_responses'] -%}
264
+ {{- format_tool_response_block(tool_response['name'] | default('unknown'), tool_response['response']) -}}
265
+ {%- set ns_tr_out.flag = true -%}
266
+ {%- set ns.prev_message_type = 'tool_response' -%}
267
+ {%- endfor -%}
268
+ {%- elif message.get('tool_calls') -%}
269
+ {#- OpenAI Chat Completions: forward-scan consecutive role:tool messages -#}
270
+ {%- set ns_tool_scan = namespace(stopped=false) -%}
271
+ {%- for k in range(loop.index0 + 1, loop_messages | length) -%}
272
+ {%- if ns_tool_scan.stopped -%}
273
+ {%- elif loop_messages[k]['role'] != 'tool' -%}
274
+ {%- set ns_tool_scan.stopped = true -%}
275
+ {%- else -%}
276
+ {%- set follow = loop_messages[k] -%}
277
+ {#- Resolve tool_call_id to function name -#}
278
+ {%- set ns_tname = namespace(name=follow.get('name') | default('unknown')) -%}
279
+ {%- for tc in message['tool_calls'] -%}
280
+ {%- if tc.get('id') == follow.get('tool_call_id') -%}
281
+ {%- set ns_tname.name = tc['function']['name'] -%}
282
+ {%- endif -%}
283
+ {%- endfor -%}
284
+ {#- Handle content as string or content-parts array -#}
285
+ {%- set tool_body = follow.get('content') -%}
286
+ {%- if tool_body is string -%}
287
+ {{- format_tool_response_block(ns_tname.name, tool_body) -}}
288
+ {%- elif tool_body is sequence and tool_body is not string -%}
289
+ {%- set ns_txt = namespace(s='') -%}
290
+ {%- for part in tool_body -%}
291
+ {%- if part.get('type') == 'text' -%}
292
+ {%- set ns_txt.s = ns_txt.s + (part.get('text') | default('')) -%}
293
+ {%- endif -%}
294
+ {%- endfor -%}
295
+ {{- format_tool_response_block(ns_tname.name, ns_txt.s) -}}
296
+ {%- else -%}
297
+ {{- format_tool_response_block(ns_tname.name, tool_body) -}}
298
+ {%- endif -%}
299
+ {%- set ns_tr_out.flag = true -%}
300
+ {%- set ns.prev_message_type = 'tool_response' -%}
301
+ {%- endif -%}
302
+ {%- endfor -%}
303
+ {%- endif -%}
304
+
305
+ {%- if message['content'] is string -%}
306
+ {%- if role == 'model' -%}
307
+ {{- strip_thinking(message['content']) -}}
308
+ {%- else -%}
309
+ {{- message['content'] | trim -}}
310
+ {%- endif -%}
311
+ {%- elif message['content'] is sequence -%}
312
+ {%- for item in message['content'] -%}
313
+ {%- if item['type'] == 'text' -%}
314
+ {%- if role == 'model' -%}
315
+ {{- strip_thinking(item['text']) -}}
316
+ {%- else -%}
317
+ {{- item['text'] | trim -}}
318
+ {%- endif -%}
319
+ {%- elif item['type'] == 'image' -%}
320
+ {{- '<|image|>' -}}
321
+ {%- set ns.prev_message_type = 'image' -%}
322
+ {%- elif item['type'] == 'audio' -%}
323
+ {{- '<|audio|>' -}}
324
+ {%- set ns.prev_message_type = 'audio' -%}
325
+ {%- elif item['type'] == 'video' -%}
326
+ {{- '<|video|>' -}}
327
+ {%- set ns.prev_message_type = 'video' -%}
328
+ {%- endif -%}
329
+ {%- endfor -%}
330
+ {%- endif -%}
331
+
332
+ {%- if ns.prev_message_type == 'tool_call' and not ns_tr_out.flag -%}
333
+ {{- '<|tool_response>' -}}
334
+ {%- elif not (ns_tr_out.flag and not message.get('content')) -%}
335
+ {{- '<turn|>\n' -}}
336
+ {%- endif -%}
337
+ {%- endif -%}
338
+ {%- endfor -%}
339
+
340
+ {%- if add_generation_prompt -%}
341
+ {%- if ns.prev_message_type != 'tool_response' and ns.prev_message_type != 'tool_call' -%}
342
+ {{- '<|turn>model\n' -}}
343
+ {%- endif -%}
344
+ {%- endif -%}
config.json ADDED
@@ -0,0 +1,1042 @@
1
+ {
2
+ "architectures": [
3
+ "Gemma4ForConditionalGeneration"
4
+ ],
5
+ "audio_config": {
6
+ "_name_or_path": "",
7
+ "architectures": null,
8
+ "attention_chunk_size": 12,
9
+ "attention_context_left": 13,
10
+ "attention_context_right": 0,
11
+ "attention_invalid_logits_value": -1000000000.0,
12
+ "attention_logit_cap": 50.0,
13
+ "chunk_size_feed_forward": 0,
14
+ "conv_kernel_size": 5,
15
+ "dtype": "bfloat16",
16
+ "gradient_clipping": 10000000000.0,
17
+ "hidden_act": "silu",
18
+ "hidden_size": 1024,
19
+ "id2label": {
20
+ "0": "LABEL_0",
21
+ "1": "LABEL_1"
22
+ },
23
+ "initializer_range": 0.02,
24
+ "is_encoder_decoder": false,
25
+ "label2id": {
26
+ "LABEL_0": 0,
27
+ "LABEL_1": 1
28
+ },
29
+ "model_type": "gemma4_audio",
30
+ "num_attention_heads": 8,
31
+ "num_hidden_layers": 12,
32
+ "output_attentions": false,
33
+ "output_hidden_states": false,
34
+ "output_proj_dims": 1536,
35
+ "problem_type": null,
36
+ "residual_weight": 0.5,
37
+ "return_dict": true,
38
+ "rms_norm_eps": 1e-06,
39
+ "subsampling_conv_channels": [
40
+ 128,
41
+ 32
42
+ ],
43
+ "use_clipped_linears": true
44
+ },
45
+ "audio_token_id": 258881,
46
+ "boa_token_id": 256000,
47
+ "boi_token_id": 255999,
48
+ "dtype": "bfloat16",
49
+ "eoa_token_id": 258883,
50
+ "eoa_token_index": 258883,
51
+ "eoi_token_id": 258882,
52
+ "eos_token_id": [
53
+ 1,
54
+ 106,
55
+ 50
56
+ ],
57
+ "image_token_id": 258880,
58
+ "initializer_range": 0.02,
59
+ "model_type": "gemma4",
60
+ "quantization": {
61
+ "group_size": 64,
62
+ "bits": 4,
63
+ "mode": "affine",
64
+ "language_model.model.layers.0.mlp.gate_proj": {
65
+ "group_size": 64,
66
+ "bits": 8
67
+ },
68
+ "language_model.model.layers.0.mlp.down_proj": {
69
+ "group_size": 64,
70
+ "bits": 8
71
+ },
72
+ "language_model.model.layers.0.mlp.up_proj": {
73
+ "group_size": 64,
74
+ "bits": 8
75
+ },
76
+ "language_model.model.layers.1.mlp.gate_proj": {
77
+ "group_size": 64,
78
+ "bits": 8
79
+ },
80
+ "language_model.model.layers.1.mlp.down_proj": {
81
+ "group_size": 64,
82
+ "bits": 8
83
+ },
84
+ "language_model.model.layers.1.mlp.up_proj": {
85
+ "group_size": 64,
86
+ "bits": 8
87
+ },
88
+ "language_model.model.layers.2.mlp.gate_proj": {
89
+ "group_size": 64,
90
+ "bits": 8
91
+ },
92
+ "language_model.model.layers.2.mlp.down_proj": {
93
+ "group_size": 64,
94
+ "bits": 8
95
+ },
96
+ "language_model.model.layers.2.mlp.up_proj": {
97
+ "group_size": 64,
98
+ "bits": 8
99
+ },
100
+ "language_model.model.layers.3.mlp.gate_proj": {
101
+ "group_size": 64,
102
+ "bits": 8
103
+ },
104
+ "language_model.model.layers.3.mlp.down_proj": {
105
+ "group_size": 64,
106
+ "bits": 8
107
+ },
108
+ "language_model.model.layers.3.mlp.up_proj": {
109
+ "group_size": 64,
110
+ "bits": 8
111
+ },
112
+ "language_model.model.layers.4.mlp.gate_proj": {
113
+ "group_size": 64,
114
+ "bits": 8
115
+ },
116
+ "language_model.model.layers.4.mlp.down_proj": {
117
+ "group_size": 64,
118
+ "bits": 8
119
+ },
120
+ "language_model.model.layers.4.mlp.up_proj": {
121
+ "group_size": 64,
122
+ "bits": 8
123
+ },
124
+ "language_model.model.layers.5.mlp.gate_proj": {
125
+ "group_size": 64,
126
+ "bits": 8
127
+ },
128
+ "language_model.model.layers.5.mlp.down_proj": {
129
+ "group_size": 64,
130
+ "bits": 8
131
+ },
132
+ "language_model.model.layers.5.mlp.up_proj": {
133
+ "group_size": 64,
134
+ "bits": 8
135
+ },
136
+ "language_model.model.layers.6.mlp.gate_proj": {
137
+ "group_size": 64,
138
+ "bits": 8
139
+ },
140
+ "language_model.model.layers.6.mlp.down_proj": {
141
+ "group_size": 64,
142
+ "bits": 8
143
+ },
144
+ "language_model.model.layers.6.mlp.up_proj": {
145
+ "group_size": 64,
146
+ "bits": 8
147
+ },
148
+ "language_model.model.layers.7.mlp.gate_proj": {
149
+ "group_size": 64,
150
+ "bits": 8
151
+ },
152
+ "language_model.model.layers.7.mlp.down_proj": {
153
+ "group_size": 64,
154
+ "bits": 8
155
+ },
156
+ "language_model.model.layers.7.mlp.up_proj": {
157
+ "group_size": 64,
158
+ "bits": 8
159
+ },
160
+ "language_model.model.layers.8.mlp.gate_proj": {
161
+ "group_size": 64,
162
+ "bits": 8
163
+ },
164
+ "language_model.model.layers.8.mlp.down_proj": {
165
+ "group_size": 64,
166
+ "bits": 8
167
+ },
168
+ "language_model.model.layers.8.mlp.up_proj": {
169
+ "group_size": 64,
170
+ "bits": 8
171
+ },
172
+ "language_model.model.layers.9.mlp.gate_proj": {
173
+ "group_size": 64,
174
+ "bits": 8
175
+ },
176
+ "language_model.model.layers.9.mlp.down_proj": {
177
+ "group_size": 64,
178
+ "bits": 8
179
+ },
180
+ "language_model.model.layers.9.mlp.up_proj": {
181
+ "group_size": 64,
182
+ "bits": 8
183
+ },
184
+ "language_model.model.layers.10.mlp.gate_proj": {
185
+ "group_size": 64,
186
+ "bits": 8
187
+ },
188
+ "language_model.model.layers.10.mlp.down_proj": {
189
+ "group_size": 64,
190
+ "bits": 8
191
+ },
192
+ "language_model.model.layers.10.mlp.up_proj": {
193
+ "group_size": 64,
194
+ "bits": 8
195
+ },
196
+ "language_model.model.layers.11.mlp.gate_proj": {
197
+ "group_size": 64,
198
+ "bits": 8
199
+ },
200
+ "language_model.model.layers.11.mlp.down_proj": {
201
+ "group_size": 64,
202
+ "bits": 8
203
+ },
204
+ "language_model.model.layers.11.mlp.up_proj": {
205
+ "group_size": 64,
206
+ "bits": 8
207
+ },
208
+ "language_model.model.layers.12.mlp.gate_proj": {
209
+ "group_size": 64,
210
+ "bits": 8
211
+ },
212
+ "language_model.model.layers.12.mlp.down_proj": {
213
+ "group_size": 64,
214
+ "bits": 8
215
+ },
216
+ "language_model.model.layers.12.mlp.up_proj": {
217
+ "group_size": 64,
218
+ "bits": 8
219
+ },
220
+ "language_model.model.layers.13.mlp.gate_proj": {
221
+ "group_size": 64,
222
+ "bits": 8
223
+ },
224
+ "language_model.model.layers.13.mlp.down_proj": {
225
+ "group_size": 64,
226
+ "bits": 8
227
+ },
228
+ "language_model.model.layers.13.mlp.up_proj": {
229
+ "group_size": 64,
230
+ "bits": 8
231
+ },
232
+ "language_model.model.layers.14.mlp.gate_proj": {
233
+ "group_size": 64,
234
+ "bits": 8
235
+ },
236
+ "language_model.model.layers.14.mlp.down_proj": {
237
+ "group_size": 64,
238
+ "bits": 8
239
+ },
240
+ "language_model.model.layers.14.mlp.up_proj": {
241
+ "group_size": 64,
242
+ "bits": 8
243
+ },
244
+ "language_model.model.layers.15.mlp.gate_proj": {
245
+ "group_size": 64,
246
+ "bits": 8
247
+ },
248
+ "language_model.model.layers.15.mlp.down_proj": {
249
+ "group_size": 64,
250
+ "bits": 8
251
+ },
252
+ "language_model.model.layers.15.mlp.up_proj": {
253
+ "group_size": 64,
254
+ "bits": 8
255
+ },
256
+ "language_model.model.layers.16.mlp.gate_proj": {
257
+ "group_size": 64,
258
+ "bits": 8
259
+ },
260
+ "language_model.model.layers.16.mlp.down_proj": {
261
+ "group_size": 64,
262
+ "bits": 8
263
+ },
264
+ "language_model.model.layers.16.mlp.up_proj": {
265
+ "group_size": 64,
266
+ "bits": 8
267
+ },
268
+ "language_model.model.layers.17.mlp.gate_proj": {
269
+ "group_size": 64,
270
+ "bits": 8
271
+ },
272
+ "language_model.model.layers.17.mlp.down_proj": {
273
+ "group_size": 64,
274
+ "bits": 8
275
+ },
276
+ "language_model.model.layers.17.mlp.up_proj": {
277
+ "group_size": 64,
278
+ "bits": 8
279
+ },
280
+ "language_model.model.layers.18.mlp.gate_proj": {
281
+ "group_size": 64,
282
+ "bits": 8
283
+ },
284
+ "language_model.model.layers.18.mlp.down_proj": {
285
+ "group_size": 64,
286
+ "bits": 8
287
+ },
288
+ "language_model.model.layers.18.mlp.up_proj": {
289
+ "group_size": 64,
290
+ "bits": 8
291
+ },
292
+ "language_model.model.layers.19.mlp.gate_proj": {
293
+ "group_size": 64,
294
+ "bits": 8
295
+ },
296
+ "language_model.model.layers.19.mlp.down_proj": {
297
+ "group_size": 64,
298
+ "bits": 8
299
+ },
300
+ "language_model.model.layers.19.mlp.up_proj": {
301
+ "group_size": 64,
302
+ "bits": 8
303
+ },
304
+ "language_model.model.layers.20.mlp.gate_proj": {
305
+ "group_size": 64,
306
+ "bits": 8
307
+ },
308
+ "language_model.model.layers.20.mlp.down_proj": {
309
+ "group_size": 64,
310
+ "bits": 8
311
+ },
312
+ "language_model.model.layers.20.mlp.up_proj": {
313
+ "group_size": 64,
314
+ "bits": 8
315
+ },
316
+ "language_model.model.layers.21.mlp.gate_proj": {
317
+ "group_size": 64,
318
+ "bits": 8
319
+ },
320
+ "language_model.model.layers.21.mlp.down_proj": {
321
+ "group_size": 64,
322
+ "bits": 8
323
+ },
324
+ "language_model.model.layers.21.mlp.up_proj": {
325
+ "group_size": 64,
326
+ "bits": 8
327
+ },
328
+ "language_model.model.layers.22.mlp.gate_proj": {
329
+ "group_size": 64,
330
+ "bits": 8
331
+ },
332
+ "language_model.model.layers.22.mlp.down_proj": {
333
+ "group_size": 64,
334
+ "bits": 8
335
+ },
336
+ "language_model.model.layers.22.mlp.up_proj": {
337
+ "group_size": 64,
338
+ "bits": 8
339
+ },
340
+ "language_model.model.layers.23.mlp.gate_proj": {
341
+ "group_size": 64,
342
+ "bits": 8
343
+ },
344
+ "language_model.model.layers.23.mlp.down_proj": {
345
+ "group_size": 64,
346
+ "bits": 8
347
+ },
348
+ "language_model.model.layers.23.mlp.up_proj": {
349
+ "group_size": 64,
350
+ "bits": 8
351
+ },
352
+ "language_model.model.layers.24.mlp.gate_proj": {
353
+ "group_size": 64,
354
+ "bits": 8
355
+ },
356
+ "language_model.model.layers.24.mlp.down_proj": {
357
+ "group_size": 64,
358
+ "bits": 8
359
+ },
360
+ "language_model.model.layers.24.mlp.up_proj": {
361
+ "group_size": 64,
362
+ "bits": 8
363
+ },
364
+ "language_model.model.layers.25.mlp.gate_proj": {
365
+ "group_size": 64,
366
+ "bits": 8
367
+ },
368
+ "language_model.model.layers.25.mlp.down_proj": {
369
+ "group_size": 64,
370
+ "bits": 8
371
+ },
372
+ "language_model.model.layers.25.mlp.up_proj": {
373
+ "group_size": 64,
374
+ "bits": 8
375
+ },
376
+ "language_model.model.layers.26.mlp.gate_proj": {
377
+ "group_size": 64,
378
+ "bits": 8
379
+ },
380
+ "language_model.model.layers.26.mlp.down_proj": {
381
+ "group_size": 64,
382
+ "bits": 8
383
+ },
384
+ "language_model.model.layers.26.mlp.up_proj": {
385
+ "group_size": 64,
386
+ "bits": 8
387
+ },
388
+ "language_model.model.layers.27.mlp.gate_proj": {
389
+ "group_size": 64,
390
+ "bits": 8
391
+ },
392
+ "language_model.model.layers.27.mlp.down_proj": {
393
+ "group_size": 64,
394
+ "bits": 8
395
+ },
396
+ "language_model.model.layers.27.mlp.up_proj": {
397
+ "group_size": 64,
398
+ "bits": 8
399
+ },
400
+ "language_model.model.layers.28.mlp.gate_proj": {
401
+ "group_size": 64,
402
+ "bits": 8
403
+ },
404
+ "language_model.model.layers.28.mlp.down_proj": {
405
+ "group_size": 64,
406
+ "bits": 8
407
+ },
408
+ "language_model.model.layers.28.mlp.up_proj": {
409
+ "group_size": 64,
410
+ "bits": 8
411
+ },
412
+ "language_model.model.layers.29.mlp.gate_proj": {
413
+ "group_size": 64,
414
+ "bits": 8
415
+ },
416
+ "language_model.model.layers.29.mlp.down_proj": {
417
+ "group_size": 64,
418
+ "bits": 8
419
+ },
420
+ "language_model.model.layers.29.mlp.up_proj": {
421
+ "group_size": 64,
422
+ "bits": 8
423
+ },
424
+ "language_model.model.layers.30.mlp.gate_proj": {
425
+ "group_size": 64,
426
+ "bits": 8
427
+ },
428
+ "language_model.model.layers.30.mlp.down_proj": {
429
+ "group_size": 64,
430
+ "bits": 8
431
+ },
432
+ "language_model.model.layers.30.mlp.up_proj": {
433
+ "group_size": 64,
434
+ "bits": 8
435
+ },
436
+ "language_model.model.layers.31.mlp.gate_proj": {
437
+ "group_size": 64,
438
+ "bits": 8
439
+ },
440
+ "language_model.model.layers.31.mlp.down_proj": {
441
+ "group_size": 64,
442
+ "bits": 8
443
+ },
444
+ "language_model.model.layers.31.mlp.up_proj": {
445
+ "group_size": 64,
446
+ "bits": 8
447
+ },
448
+ "language_model.model.layers.32.mlp.gate_proj": {
449
+ "group_size": 64,
450
+ "bits": 8
451
+ },
452
+ "language_model.model.layers.32.mlp.down_proj": {
453
+ "group_size": 64,
454
+ "bits": 8
455
+ },
456
+ "language_model.model.layers.32.mlp.up_proj": {
457
+ "group_size": 64,
458
+ "bits": 8
459
+ },
460
+ "language_model.model.layers.33.mlp.gate_proj": {
461
+ "group_size": 64,
462
+ "bits": 8
463
+ },
464
+ "language_model.model.layers.33.mlp.down_proj": {
465
+ "group_size": 64,
466
+ "bits": 8
467
+ },
468
+ "language_model.model.layers.33.mlp.up_proj": {
469
+ "group_size": 64,
470
+ "bits": 8
471
+ },
472
+ "language_model.model.layers.34.mlp.gate_proj": {
473
+ "group_size": 64,
474
+ "bits": 8
475
+ },
476
+ "language_model.model.layers.34.mlp.down_proj": {
477
+ "group_size": 64,
478
+ "bits": 8
479
+ },
480
+ "language_model.model.layers.34.mlp.up_proj": {
481
+ "group_size": 64,
482
+ "bits": 8
483
+ }
484
+ },
485
+ "quantization_config": {
486
+ "group_size": 64,
487
+ "bits": 4,
488
+ "mode": "affine",
489
+ "language_model.model.layers.0.mlp.gate_proj": {
490
+ "group_size": 64,
491
+ "bits": 8
492
+ },
493
+ "language_model.model.layers.0.mlp.down_proj": {
494
+ "group_size": 64,
495
+ "bits": 8
496
+ },
497
+ "language_model.model.layers.0.mlp.up_proj": {
498
+ "group_size": 64,
499
+ "bits": 8
500
+ },
501
+ "language_model.model.layers.1.mlp.gate_proj": {
502
+ "group_size": 64,
503
+ "bits": 8
504
+ },
505
+ "language_model.model.layers.1.mlp.down_proj": {
506
+ "group_size": 64,
507
+ "bits": 8
508
+ },
509
+ "language_model.model.layers.1.mlp.up_proj": {
510
+ "group_size": 64,
511
+ "bits": 8
512
+ },
513
+ "language_model.model.layers.2.mlp.gate_proj": {
514
+ "group_size": 64,
515
+ "bits": 8
516
+ },
517
+ "language_model.model.layers.2.mlp.down_proj": {
518
+ "group_size": 64,
519
+ "bits": 8
520
+ },
521
+ "language_model.model.layers.2.mlp.up_proj": {
522
+ "group_size": 64,
523
+ "bits": 8
524
+ },
525
+ "language_model.model.layers.3.mlp.gate_proj": {
526
+ "group_size": 64,
527
+ "bits": 8
528
+ },
529
+ "language_model.model.layers.3.mlp.down_proj": {
530
+ "group_size": 64,
531
+ "bits": 8
532
+ },
533
+ "language_model.model.layers.3.mlp.up_proj": {
534
+ "group_size": 64,
535
+ "bits": 8
536
+ },
537
+ "language_model.model.layers.4.mlp.gate_proj": {
538
+ "group_size": 64,
539
+ "bits": 8
540
+ },
541
+ "language_model.model.layers.4.mlp.down_proj": {
542
+ "group_size": 64,
543
+ "bits": 8
544
+ },
545
+ "language_model.model.layers.4.mlp.up_proj": {
546
+ "group_size": 64,
547
+ "bits": 8
548
+ },
549
+ "language_model.model.layers.5.mlp.gate_proj": {
550
+ "group_size": 64,
551
+ "bits": 8
552
+ },
553
+ "language_model.model.layers.5.mlp.down_proj": {
554
+ "group_size": 64,
555
+ "bits": 8
556
+ },
557
+ "language_model.model.layers.5.mlp.up_proj": {
558
+ "group_size": 64,
559
+ "bits": 8
560
+ },
561
+ "language_model.model.layers.6.mlp.gate_proj": {
562
+ "group_size": 64,
563
+ "bits": 8
564
+ },
565
+ "language_model.model.layers.6.mlp.down_proj": {
566
+ "group_size": 64,
567
+ "bits": 8
568
+ },
569
+ "language_model.model.layers.6.mlp.up_proj": {
570
+ "group_size": 64,
571
+ "bits": 8
572
+ },
573
+ "language_model.model.layers.7.mlp.gate_proj": {
574
+ "group_size": 64,
575
+ "bits": 8
576
+ },
577
+ "language_model.model.layers.7.mlp.down_proj": {
578
+ "group_size": 64,
579
+ "bits": 8
580
+ },
581
+ "language_model.model.layers.7.mlp.up_proj": {
582
+ "group_size": 64,
583
+ "bits": 8
584
+ },
585
+ "language_model.model.layers.8.mlp.gate_proj": {
586
+ "group_size": 64,
587
+ "bits": 8
588
+ },
589
+ "language_model.model.layers.8.mlp.down_proj": {
590
+ "group_size": 64,
591
+ "bits": 8
592
+ },
593
+ "language_model.model.layers.8.mlp.up_proj": {
594
+ "group_size": 64,
595
+ "bits": 8
596
+ },
597
+ "language_model.model.layers.9.mlp.gate_proj": {
598
+ "group_size": 64,
599
+ "bits": 8
600
+ },
601
+ "language_model.model.layers.9.mlp.down_proj": {
602
+ "group_size": 64,
603
+ "bits": 8
604
+ },
605
+ "language_model.model.layers.9.mlp.up_proj": {
606
+ "group_size": 64,
607
+ "bits": 8
608
+ },
609
+ "language_model.model.layers.10.mlp.gate_proj": {
610
+ "group_size": 64,
611
+ "bits": 8
612
+ },
613
+ "language_model.model.layers.10.mlp.down_proj": {
614
+ "group_size": 64,
615
+ "bits": 8
616
+ },
617
+ "language_model.model.layers.10.mlp.up_proj": {
618
+ "group_size": 64,
619
+ "bits": 8
620
+ },
621
+ "language_model.model.layers.11.mlp.gate_proj": {
622
+ "group_size": 64,
623
+ "bits": 8
624
+ },
625
+ "language_model.model.layers.11.mlp.down_proj": {
626
+ "group_size": 64,
627
+ "bits": 8
628
+ },
629
+ "language_model.model.layers.11.mlp.up_proj": {
630
+ "group_size": 64,
631
+ "bits": 8
632
+ },
633
+ "language_model.model.layers.12.mlp.gate_proj": {
634
+ "group_size": 64,
635
+ "bits": 8
636
+ },
637
+ "language_model.model.layers.12.mlp.down_proj": {
638
+ "group_size": 64,
639
+ "bits": 8
640
+ },
641
+ "language_model.model.layers.12.mlp.up_proj": {
642
+ "group_size": 64,
643
+ "bits": 8
644
+ },
645
+ "language_model.model.layers.13.mlp.gate_proj": {
646
+ "group_size": 64,
647
+ "bits": 8
648
+ },
649
+ "language_model.model.layers.13.mlp.down_proj": {
650
+ "group_size": 64,
651
+ "bits": 8
652
+ },
653
+ "language_model.model.layers.13.mlp.up_proj": {
654
+ "group_size": 64,
655
+ "bits": 8
656
+ },
657
+ "language_model.model.layers.14.mlp.gate_proj": {
658
+ "group_size": 64,
659
+ "bits": 8
660
+ },
661
+ "language_model.model.layers.14.mlp.down_proj": {
662
+ "group_size": 64,
663
+ "bits": 8
664
+ },
665
+ "language_model.model.layers.14.mlp.up_proj": {
666
+ "group_size": 64,
667
+ "bits": 8
668
+ },
669
+ "language_model.model.layers.15.mlp.gate_proj": {
670
+ "group_size": 64,
671
+ "bits": 8
672
+ },
673
+ "language_model.model.layers.15.mlp.down_proj": {
674
+ "group_size": 64,
675
+ "bits": 8
676
+ },
677
+ "language_model.model.layers.15.mlp.up_proj": {
678
+ "group_size": 64,
679
+ "bits": 8
680
+ },
681
+ "language_model.model.layers.16.mlp.gate_proj": {
682
+ "group_size": 64,
683
+ "bits": 8
684
+ },
685
+ "language_model.model.layers.16.mlp.down_proj": {
686
+ "group_size": 64,
687
+ "bits": 8
688
+ },
689
+ "language_model.model.layers.16.mlp.up_proj": {
690
+ "group_size": 64,
691
+ "bits": 8
692
+ },
693
+ "language_model.model.layers.17.mlp.gate_proj": {
694
+ "group_size": 64,
695
+ "bits": 8
696
+ },
697
+ "language_model.model.layers.17.mlp.down_proj": {
698
+ "group_size": 64,
699
+ "bits": 8
700
+ },
701
+ "language_model.model.layers.17.mlp.up_proj": {
702
+ "group_size": 64,
703
+ "bits": 8
704
+ },
705
+ "language_model.model.layers.18.mlp.gate_proj": {
706
+ "group_size": 64,
707
+ "bits": 8
708
+ },
709
+ "language_model.model.layers.18.mlp.down_proj": {
710
+ "group_size": 64,
711
+ "bits": 8
712
+ },
713
+ "language_model.model.layers.18.mlp.up_proj": {
714
+ "group_size": 64,
715
+ "bits": 8
716
+ },
717
+ "language_model.model.layers.19.mlp.gate_proj": {
718
+ "group_size": 64,
719
+ "bits": 8
720
+ },
721
+ "language_model.model.layers.19.mlp.down_proj": {
722
+ "group_size": 64,
723
+ "bits": 8
724
+ },
725
+ "language_model.model.layers.19.mlp.up_proj": {
726
+ "group_size": 64,
727
+ "bits": 8
728
+ },
729
+ "language_model.model.layers.20.mlp.gate_proj": {
730
+ "group_size": 64,
731
+ "bits": 8
732
+ },
733
+ "language_model.model.layers.20.mlp.down_proj": {
734
+ "group_size": 64,
735
+ "bits": 8
736
+ },
737
+ "language_model.model.layers.20.mlp.up_proj": {
738
+ "group_size": 64,
739
+ "bits": 8
740
+ },
741
+ "language_model.model.layers.21.mlp.gate_proj": {
742
+ "group_size": 64,
743
+ "bits": 8
744
+ },
745
+ "language_model.model.layers.21.mlp.down_proj": {
746
+ "group_size": 64,
747
+ "bits": 8
748
+ },
749
+ "language_model.model.layers.21.mlp.up_proj": {
750
+ "group_size": 64,
751
+ "bits": 8
752
+ },
753
+ "language_model.model.layers.22.mlp.gate_proj": {
754
+ "group_size": 64,
755
+ "bits": 8
756
+ },
757
+ "language_model.model.layers.22.mlp.down_proj": {
758
+ "group_size": 64,
759
+ "bits": 8
760
+ },
761
+ "language_model.model.layers.22.mlp.up_proj": {
762
+ "group_size": 64,
763
+ "bits": 8
764
+ },
765
+ "language_model.model.layers.23.mlp.gate_proj": {
766
+ "group_size": 64,
767
+ "bits": 8
768
+ },
769
+ "language_model.model.layers.23.mlp.down_proj": {
770
+ "group_size": 64,
771
+ "bits": 8
772
+ },
773
+ "language_model.model.layers.23.mlp.up_proj": {
774
+ "group_size": 64,
775
+ "bits": 8
776
+ },
777
+ "language_model.model.layers.24.mlp.gate_proj": {
778
+ "group_size": 64,
779
+ "bits": 8
780
+ },
781
+ "language_model.model.layers.24.mlp.down_proj": {
782
+ "group_size": 64,
783
+ "bits": 8
784
+ },
785
+ "language_model.model.layers.24.mlp.up_proj": {
786
+ "group_size": 64,
787
+ "bits": 8
788
+ },
789
+ "language_model.model.layers.25.mlp.gate_proj": {
790
+ "group_size": 64,
791
+ "bits": 8
792
+ },
793
+ "language_model.model.layers.25.mlp.down_proj": {
794
+ "group_size": 64,
795
+ "bits": 8
796
+ },
797
+ "language_model.model.layers.25.mlp.up_proj": {
798
+ "group_size": 64,
799
+ "bits": 8
800
+ },
801
+ "language_model.model.layers.26.mlp.gate_proj": {
802
+ "group_size": 64,
803
+ "bits": 8
804
+ },
805
+ "language_model.model.layers.26.mlp.down_proj": {
806
+ "group_size": 64,
807
+ "bits": 8
808
+ },
809
+ "language_model.model.layers.26.mlp.up_proj": {
810
+ "group_size": 64,
811
+ "bits": 8
812
+ },
813
+ "language_model.model.layers.27.mlp.gate_proj": {
814
+ "group_size": 64,
815
+ "bits": 8
816
+ },
817
+ "language_model.model.layers.27.mlp.down_proj": {
818
+ "group_size": 64,
819
+ "bits": 8
820
+ },
821
+ "language_model.model.layers.27.mlp.up_proj": {
822
+ "group_size": 64,
823
+ "bits": 8
824
+ },
825
+ "language_model.model.layers.28.mlp.gate_proj": {
826
+ "group_size": 64,
827
+ "bits": 8
828
+ },
829
+ "language_model.model.layers.28.mlp.down_proj": {
830
+ "group_size": 64,
831
+ "bits": 8
832
+ },
833
+ "language_model.model.layers.28.mlp.up_proj": {
834
+ "group_size": 64,
835
+ "bits": 8
836
+ },
837
+ "language_model.model.layers.29.mlp.gate_proj": {
838
+ "group_size": 64,
839
+ "bits": 8
840
+ },
841
+ "language_model.model.layers.29.mlp.down_proj": {
842
+ "group_size": 64,
843
+ "bits": 8
844
+ },
845
+ "language_model.model.layers.29.mlp.up_proj": {
846
+ "group_size": 64,
847
+ "bits": 8
848
+ },
849
+ "language_model.model.layers.30.mlp.gate_proj": {
850
+ "group_size": 64,
851
+ "bits": 8
852
+ },
853
+ "language_model.model.layers.30.mlp.down_proj": {
854
+ "group_size": 64,
855
+ "bits": 8
856
+ },
857
+ "language_model.model.layers.30.mlp.up_proj": {
858
+ "group_size": 64,
859
+ "bits": 8
860
+ },
861
+ "language_model.model.layers.31.mlp.gate_proj": {
862
+ "group_size": 64,
863
+ "bits": 8
864
+ },
865
+ "language_model.model.layers.31.mlp.down_proj": {
866
+ "group_size": 64,
867
+ "bits": 8
868
+ },
869
+ "language_model.model.layers.31.mlp.up_proj": {
870
+ "group_size": 64,
871
+ "bits": 8
872
+ },
873
+ "language_model.model.layers.32.mlp.gate_proj": {
874
+ "group_size": 64,
875
+ "bits": 8
876
+ },
877
+ "language_model.model.layers.32.mlp.down_proj": {
878
+ "group_size": 64,
879
+ "bits": 8
880
+ },
881
+ "language_model.model.layers.32.mlp.up_proj": {
882
+ "group_size": 64,
883
+ "bits": 8
884
+ },
885
+ "language_model.model.layers.33.mlp.gate_proj": {
886
+ "group_size": 64,
887
+ "bits": 8
888
+ },
889
+ "language_model.model.layers.33.mlp.down_proj": {
890
+ "group_size": 64,
891
+ "bits": 8
892
+ },
893
+ "language_model.model.layers.33.mlp.up_proj": {
894
+ "group_size": 64,
895
+ "bits": 8
896
+ },
897
+ "language_model.model.layers.34.mlp.gate_proj": {
898
+ "group_size": 64,
899
+ "bits": 8
900
+ },
901
+ "language_model.model.layers.34.mlp.down_proj": {
902
+ "group_size": 64,
903
+ "bits": 8
904
+ },
905
+ "language_model.model.layers.34.mlp.up_proj": {
906
+ "group_size": 64,
907
+ "bits": 8
908
+ }
909
+ },
910
+ "text_config": {
911
+ "attention_bias": false,
912
+ "attention_dropout": 0.0,
913
+ "attention_k_eq_v": false,
914
+ "bos_token_id": 2,
915
+ "dtype": "bfloat16",
916
+ "enable_moe_block": false,
917
+ "eos_token_id": 1,
918
+ "expert_intermediate_size": null,
919
+ "final_logit_softcapping": 30.0,
920
+ "global_head_dim": 512,
921
+ "head_dim": 256,
922
+ "hidden_activation": "gelu_pytorch_tanh",
923
+ "hidden_size": 1536,
924
+ "hidden_size_per_layer_input": 256,
925
+ "initializer_range": 0.02,
926
+ "intermediate_size": 6144,
927
+ "layer_types": [
928
+ "sliding_attention",
929
+ "sliding_attention",
930
+ "sliding_attention",
931
+ "sliding_attention",
932
+ "full_attention",
933
+ "sliding_attention",
934
+ "sliding_attention",
935
+ "sliding_attention",
936
+ "sliding_attention",
937
+ "full_attention",
938
+ "sliding_attention",
939
+ "sliding_attention",
940
+ "sliding_attention",
941
+ "sliding_attention",
942
+ "full_attention",
943
+ "sliding_attention",
944
+ "sliding_attention",
945
+ "sliding_attention",
946
+ "sliding_attention",
947
+ "full_attention",
948
+ "sliding_attention",
949
+ "sliding_attention",
950
+ "sliding_attention",
951
+ "sliding_attention",
952
+ "full_attention",
953
+ "sliding_attention",
954
+ "sliding_attention",
955
+ "sliding_attention",
956
+ "sliding_attention",
957
+ "full_attention",
958
+ "sliding_attention",
959
+ "sliding_attention",
960
+ "sliding_attention",
961
+ "sliding_attention",
962
+ "full_attention"
963
+ ],
964
+ "max_position_embeddings": 131072,
965
+ "model_type": "gemma4_text",
966
+ "moe_intermediate_size": null,
967
+ "num_attention_heads": 8,
968
+ "num_experts": null,
969
+ "num_global_key_value_heads": null,
970
+ "num_hidden_layers": 35,
971
+ "num_key_value_heads": 1,
972
+ "num_kv_shared_layers": 20,
973
+ "pad_token_id": 0,
974
+ "rms_norm_eps": 1e-06,
975
+ "rope_parameters": {
976
+ "full_attention": {
977
+ "partial_rotary_factor": 0.25,
978
+ "rope_theta": 1000000.0,
979
+ "rope_type": "proportional"
980
+ },
981
+ "sliding_attention": {
982
+ "rope_theta": 10000.0,
983
+ "rope_type": "default"
984
+ }
985
+ },
986
+ "sliding_window": 512,
987
+ "tie_word_embeddings": true,
988
+ "top_k_experts": null,
989
+ "use_bidirectional_attention": null,
990
+ "use_cache": true,
991
+ "use_double_wide_mlp": true,
992
+ "vocab_size": 262144,
993
+ "vocab_size_per_layer_input": 262144
994
+ },
995
+ "tie_word_embeddings": true,
996
+ "transformers_version": "5.5.3",
997
+ "video_token_id": 258884,
998
+ "vision_config": {
999
+ "_name_or_path": "",
1000
+ "architectures": null,
1001
+ "attention_bias": false,
1002
+ "attention_dropout": 0.0,
1003
+ "chunk_size_feed_forward": 0,
1004
+ "default_output_length": 280,
1005
+ "dtype": "bfloat16",
1006
+ "global_head_dim": 64,
1007
+ "head_dim": 64,
1008
+ "hidden_activation": "gelu_pytorch_tanh",
1009
+ "hidden_size": 768,
1010
+ "id2label": {
1011
+ "0": "LABEL_0",
1012
+ "1": "LABEL_1"
1013
+ },
1014
+ "initializer_range": 0.02,
1015
+ "intermediate_size": 3072,
1016
+ "is_encoder_decoder": false,
1017
+ "label2id": {
1018
+ "LABEL_0": 0,
1019
+ "LABEL_1": 1
1020
+ },
1021
+ "max_position_embeddings": 131072,
1022
+ "model_type": "gemma4_vision",
1023
+ "num_attention_heads": 12,
1024
+ "num_hidden_layers": 16,
1025
+ "num_key_value_heads": 12,
1026
+ "output_attentions": false,
1027
+ "output_hidden_states": false,
1028
+ "patch_size": 16,
1029
+ "pooling_kernel_size": 3,
1030
+ "position_embedding_size": 10240,
1031
+ "problem_type": null,
1032
+ "return_dict": true,
1033
+ "rms_norm_eps": 1e-06,
1034
+ "rope_parameters": {
1035
+ "rope_theta": 100.0,
1036
+ "rope_type": "default"
1037
+ },
1038
+ "standardize": false,
1039
+ "use_clipped_linears": true
1040
+ },
1041
+ "vision_soft_tokens_per_image": 280
1042
+ }
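
The config.json above carries a mixed-precision recipe: a global 4-bit, group-size-64 affine default in `quantization_config`, with per-layer overrides that keep the MLP gate/up/down projections of all 35 text layers at 8 bits (the same overrides also appear under the `quantization` key, presumably for compatibility with older MLX loaders). Below is a minimal sketch of how to summarize that plan from a local copy of the file; the path is an assumption, not part of this commit.

```python
import json
from collections import Counter

# Summarize the mixed-precision plan from config.json (local path assumed).
qcfg = json.load(open("config.json"))["quantization_config"]

default = {k: qcfg[k] for k in ("bits", "group_size", "mode")}
overrides = {name: spec for name, spec in qcfg.items() if isinstance(spec, dict)}

print("default:", default)                       # {'bits': 4, 'group_size': 64, 'mode': 'affine'}
print("overridden projections:", len(overrides))  # 105 = 35 layers x gate/up/down_proj
print("override bit-widths:", Counter(s["bits"] for s in overrides.values()))  # Counter({8: 105})
```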
generation_config.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token_id": 2,
3
+ "do_sample": true,
4
+ "eos_token_id": [
5
+ 1,
6
+ 106,
7
+ 50
8
+ ],
9
+ "pad_token_id": 0,
10
+ "temperature": 1.0,
11
+ "top_k": 64,
12
+ "top_p": 0.95,
13
+ "transformers_version": "5.5.3"
14
+ }
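
generation_config.json pins the calibrated sampling defaults (do_sample with temperature 1.0, top_k 64, top_p 0.95) rather than greedy decoding. The sketch below is a plain-NumPy illustration of what those three knobs do to a logits vector; it is not the MLX sampler itself, the file path is assumed to be local, and the vocabulary size (262144) is taken from text_config above.

```python
import json
import numpy as np

# Sampling defaults exactly as committed in generation_config.json (local path assumed).
cfg = json.load(open("generation_config.json"))
temperature, top_k, top_p = cfg["temperature"], cfg["top_k"], cfg["top_p"]

def sample(logits: np.ndarray, rng: np.random.Generator) -> int:
    """Temperature -> top-k -> top-p (nucleus) sampling, as a plain-NumPy sketch."""
    logits = logits / temperature                  # temperature 1.0 leaves logits unchanged
    order = np.argsort(logits)[::-1][:top_k]       # keep the 64 highest-scoring tokens
    probs = np.exp(logits[order] - logits[order].max())
    probs /= probs.sum()
    keep = (np.cumsum(probs) - probs) < top_p      # smallest prefix covering 0.95 of the mass
    probs = probs[keep] / probs[keep].sum()
    return int(rng.choice(order[keep], p=probs))

rng = np.random.default_rng(0)
fake_logits = rng.normal(size=262144)              # vocab_size from text_config, random logits
print(sample(fake_logits, rng))
```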
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7e3458e3ce7473988ebafb1701b0a80621d783e09484994b940a7e644e0e943e
3
+ size 4359668843
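
model.safetensors is committed as a git-lfs pointer (spec v1), so the repo itself stores only the oid and the byte size (4,359,668,843 bytes, roughly 4.06 GiB). A small sketch for checking a downloaded blob against such a pointer follows; both local filenames are assumptions for illustration, not files in this commit.

```python
import hashlib
from pathlib import Path

# Check a downloaded model.safetensors against its git-lfs pointer (spec v1).
# Both file names below are assumed local paths, not part of this commit.
pointer = dict(
    line.split(" ", 1)
    for line in Path("model.safetensors.pointer").read_text().splitlines()
)
expected_size = int(pointer["size"])
expected_oid = pointer["oid"].split(":", 1)[1]        # "sha256:<hex>" -> "<hex>"

blob = Path("model.safetensors")
assert blob.stat().st_size == expected_size, "size mismatch"

sha = hashlib.sha256()
with blob.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)
assert sha.hexdigest() == expected_oid, "sha256 mismatch"

print(f"verified {expected_size / 2**30:.2f} GiB blob")
```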
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
processor_config.json ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "audio_seq_length": 750,
3
+ "image_processor": {
4
+ "do_convert_rgb": true,
5
+ "do_normalize": false,
6
+ "do_rescale": true,
7
+ "do_resize": true,
8
+ "image_mean": [
9
+ 0.0,
10
+ 0.0,
11
+ 0.0
12
+ ],
13
+ "image_processor_type": "Gemma4ImageProcessor",
14
+ "image_seq_length": 280,
15
+ "image_std": [
16
+ 1.0,
17
+ 1.0,
18
+ 1.0
19
+ ],
20
+ "max_soft_tokens": 280,
21
+ "patch_size": 16,
22
+ "pooling_kernel_size": 3,
23
+ "resample": 3,
24
+ "rescale_factor": 0.00392156862745098,
25
+ "size": {
26
+ "height": 224,
27
+ "width": 224
28
+ }
29
+ },
30
+ "image_seq_length": 280,
31
+ "processor_class": "Gemma4Processor",
32
+ "feature_extractor": {
33
+ "feature_extractor_type": "Gemma4AudioFeatureExtractor",
34
+ "sampling_rate": 16000,
35
+ "num_mel_filters": 128,
36
+ "fft_length": 512,
37
+ "hop_length": 160,
38
+ "chunk_duration": 8.0,
39
+ "overlap_duration": 1.0
40
+ },
41
+ "audio_ms_per_token": 40
42
+ }
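
processor_config.json fixes the audio front end at 16 kHz with a 160-sample hop and 40 ms of audio per token. Assuming the usual one-frame-per-hop mel framing, a few derived numbers follow directly from those fields: 10 ms per mel frame, 4 frames per token, 200 tokens per 8 s chunk, and a 30 s ceiling from audio_seq_length = 750. A minimal sketch (local path assumed):

```python
import json

# Derive the audio timing implied by processor_config.json (local path assumed).
proc = json.load(open("processor_config.json"))
fx = proc["feature_extractor"]

frame_ms = 1000 * fx["hop_length"] / fx["sampling_rate"]                      # 160 / 16000 -> 10.0 ms per frame
frames_per_token = proc["audio_ms_per_token"] / frame_ms                      # 40 / 10     -> 4.0 frames per token
tokens_per_chunk = fx["chunk_duration"] * 1000 / proc["audio_ms_per_token"]   # 8 s         -> 200.0 tokens
max_audio_s = proc["audio_seq_length"] * proc["audio_ms_per_token"] / 1000    # 750 tokens  -> 30.0 s

print(frame_ms, frames_per_token, tokens_per_chunk, max_audio_s)              # 10.0 4.0 200.0 30.0
```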
tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cc8d3a0ce36466ccc1278bf987df5f71db1719b9ca6b4118264f45cb627bfe0f
3
+ size 32169626
tokenizer_config.json ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "audio_token": "<|audio|>",
3
+ "backend": "tokenizers",
4
+ "boa_token": "<|audio>",
5
+ "boi_token": "<|image>",
6
+ "bos_token": "<bos>",
7
+ "eoa_token": "<audio|>",
8
+ "eoc_token": "<channel|>",
9
+ "eoi_token": "<image|>",
10
+ "eos_token": "<eos>",
11
+ "eot_token": "<turn|>",
12
+ "escape_token": "<|\"|>",
13
+ "etc_token": "<tool_call|>",
14
+ "etd_token": "<tool|>",
15
+ "etr_token": "<tool_response|>",
16
+ "extra_special_tokens": [
17
+ "<|video|>"
18
+ ],
19
+ "image_token": "<|image|>",
20
+ "is_local": true,
21
+ "mask_token": "<mask>",
22
+ "model_max_length": 1000000000000000019884624838656,
23
+ "model_specific_special_tokens": {
24
+ "audio_token": "<|audio|>",
25
+ "boa_token": "<|audio>",
26
+ "boi_token": "<|image>",
27
+ "eoa_token": "<audio|>",
28
+ "eoc_token": "<channel|>",
29
+ "eoi_token": "<image|>",
30
+ "eot_token": "<turn|>",
31
+ "escape_token": "<|\"|>",
32
+ "etc_token": "<tool_call|>",
33
+ "etd_token": "<tool|>",
34
+ "etr_token": "<tool_response|>",
35
+ "image_token": "<|image|>",
36
+ "soc_token": "<|channel>",
37
+ "sot_token": "<|turn>",
38
+ "stc_token": "<|tool_call>",
39
+ "std_token": "<|tool>",
40
+ "str_token": "<|tool_response>",
41
+ "think_token": "<|think|>"
42
+ },
43
+ "pad_token": "<pad>",
44
+ "padding_side": "left",
45
+ "processor_class": "Gemma4Processor",
46
+ "response_schema": {
47
+ "properties": {
48
+ "content": {
49
+ "type": "string"
50
+ },
51
+ "role": {
52
+ "const": "assistant"
53
+ },
54
+ "thinking": {
55
+ "type": "string"
56
+ },
57
+ "tool_calls": {
58
+ "items": {
59
+ "properties": {
60
+ "function": {
61
+ "properties": {
62
+ "arguments": {
63
+ "additionalProperties": {},
64
+ "type": "object",
65
+ "x-parser": "gemma4-tool-call"
66
+ },
67
+ "name": {
68
+ "type": "string"
69
+ }
70
+ },
71
+ "type": "object",
72
+ "x-regex": "call\\:(?P<name>\\w+)(?P<arguments>\\{.*\\})"
73
+ },
74
+ "type": {
75
+ "const": "function"
76
+ }
77
+ },
78
+ "type": "object"
79
+ },
80
+ "type": "array",
81
+ "x-regex-iterator": "<\\|tool_call>(.*?)<tool_call\\|>"
82
+ }
83
+ },
84
+ "type": "object",
85
+ "x-regex": "(\\<\\|channel\\>thought\\n(?P<thinking>.*?)\\<channel\\|\\>)?(?P<content>(?:(?!\\<\\|tool_call\\>)(?!\\<turn\\|\\>).)+)?(?P<tool_calls>\\<\\|tool_call\\>.*\\<tool_call\\|\\>)?(?:\\<turn\\|\\>)?"
86
+ },
87
+ "soc_token": "<|channel>",
88
+ "sot_token": "<|turn>",
89
+ "stc_token": "<|tool_call>",
90
+ "std_token": "<|tool>",
91
+ "str_token": "<|tool_response>",
92
+ "think_token": "<|think|>",
93
+ "tokenizer_class": "GemmaTokenizer",
94
+ "unk_token": "<unk>"
95
+ }
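
The response_schema in tokenizer_config.json encodes, as x-regex annotations, how an assistant turn is split back into thinking, content, and tool_calls, and how each <|tool_call>…<tool_call|> span is parsed into a function name plus JSON arguments. The sketch below applies those exact regexes with Python's re module to a made-up output string; the sample text and the get_weather call are illustrative only, not anything the model is guaranteed to emit.

```python
import json
import re

# Regexes copied verbatim from the response_schema x-regex annotations above.
TURN_RE = re.compile(
    r"(\<\|channel\>thought\n(?P<thinking>.*?)\<channel\|\>)?"
    r"(?P<content>(?:(?!\<\|tool_call\>)(?!\<turn\|\>).)+)?"
    r"(?P<tool_calls>\<\|tool_call\>.*\<tool_call\|\>)?(?:\<turn\|\>)?",
    re.DOTALL,
)
TOOL_CALL_RE = re.compile(r"<\|tool_call>(.*?)<tool_call\|>", re.DOTALL)
FUNCTION_RE = re.compile(r"call\:(?P<name>\w+)(?P<arguments>\{.*\})", re.DOTALL)

# Made-up assistant output, purely for illustration.
raw = (
    "<|channel>thought\nThe user wants the weather.<channel|>"
    "Let me check that for you."
    '<|tool_call>call:get_weather{"city": "Lisbon"}<tool_call|>'
    "<turn|>"
)

m = TURN_RE.match(raw)
message = {
    "role": "assistant",
    "thinking": m.group("thinking"),
    "content": m.group("content"),
    "tool_calls": [],
}
for span in TOOL_CALL_RE.findall(m.group("tool_calls") or ""):
    fn = FUNCTION_RE.match(span)
    message["tool_calls"].append(
        {
            "type": "function",
            "function": {
                "name": fn.group("name"),
                "arguments": json.loads(fn.group("arguments")),
            },
        }
    )
print(json.dumps(message, indent=2))
```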