{
  "architectures": [
    "Gemma4ForConditionalGeneration"
  ],
  "audio_config": {
    "_name_or_path": "",
    "architectures": null,
    "attention_chunk_size": 12,
    "attention_context_left": 13,
    "attention_context_right": 0,
    "attention_invalid_logits_value": -1000000000.0,
    "attention_logit_cap": 50.0,
    "chunk_size_feed_forward": 0,
    "conv_kernel_size": 5,
    "dtype": "bfloat16",
    "gradient_clipping": 10000000000.0,
    "hidden_act": "silu",
    "hidden_size": 1024,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "initializer_range": 0.02,
    "is_encoder_decoder": false,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "model_type": "gemma4_audio",
    "num_attention_heads": 8,
    "num_hidden_layers": 12,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_proj_dims": 1536,
    "problem_type": null,
    "residual_weight": 0.5,
    "return_dict": true,
    "rms_norm_eps": 1e-06,
    "subsampling_conv_channels": [
      128,
      32
    ],
    "use_clipped_linears": true
  },
  "audio_token_id": 258881,
  "boa_token_id": 256000,
  "boi_token_id": 255999,
  "dtype": "bfloat16",
  "eoa_token_id": 258883,
  "eoa_token_index": 258883,
  "eoi_token_id": 258882,
  "eos_token_id": 1,
  "image_token_id": 258880,
  "initializer_range": 0.02,
  "model_type": "gemma4",
  "quantization": {
    "group_size": 16,
    "bits": 4,
    "mode": "nvfp4",
    "language_model.model.layers.0.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.0.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.0.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.1.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.1.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.1.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.2.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.2.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.2.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.3.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.3.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.3.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.4.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.4.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.4.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.5.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.5.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.5.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.6.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.6.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.6.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.7.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.7.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.7.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.8.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.8.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.8.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.9.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.9.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.9.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.10.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.10.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.10.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.11.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.11.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.11.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.12.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.12.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.12.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.13.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.13.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.13.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.14.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.14.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.14.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.15.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.15.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.15.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.16.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.16.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.16.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.17.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.17.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.17.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.18.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.18.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.18.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.19.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.19.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.19.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.20.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.20.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.20.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.21.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.21.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.21.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.22.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.22.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.22.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.23.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.23.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.23.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.24.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.24.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.24.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.25.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.25.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.25.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.26.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.26.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.26.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.27.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.27.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.27.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.28.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.28.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.28.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.29.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.29.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.29.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.30.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.30.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.30.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.31.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.31.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.31.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.32.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.32.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.32.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.33.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.33.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.33.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.34.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.34.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.34.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    }
  },
  "quantization_config": {
    "group_size": 16,
    "bits": 4,
    "mode": "nvfp4",
    "language_model.model.layers.0.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.0.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.0.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.1.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.1.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.1.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.2.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.2.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.2.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.3.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.3.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.3.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.4.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.4.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.4.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.5.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.5.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.5.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.6.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.6.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.6.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.7.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.7.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.7.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.8.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.8.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.8.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.9.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.9.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.9.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.10.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.10.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.10.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.11.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.11.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.11.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.12.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.12.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.12.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.13.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.13.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.13.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.14.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.14.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.14.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.15.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.15.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.15.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.16.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.16.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.16.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.17.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.17.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.17.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.18.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.18.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.18.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.19.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.19.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.19.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.20.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.20.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.20.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.21.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.21.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.21.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.22.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.22.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.22.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.23.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.23.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.23.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.24.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.24.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.24.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.25.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.25.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.25.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.26.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.26.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.26.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.27.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.27.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.27.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.28.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.28.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.28.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.29.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.29.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.29.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.30.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.30.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.30.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.31.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.31.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.31.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.32.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.32.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.32.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.33.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.33.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.33.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.34.mlp.gate_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.34.mlp.down_proj": {
      "group_size": 64,
      "bits": 8
    },
    "language_model.model.layers.34.mlp.up_proj": {
      "group_size": 64,
      "bits": 8
    }
  },
  "text_config": {
    "attention_bias": false,
    "attention_dropout": 0.0,
    "attention_k_eq_v": false,
    "bos_token_id": 2,
    "dtype": "bfloat16",
    "enable_moe_block": false,
    "eos_token_id": 1,
    "expert_intermediate_size": null,
    "final_logit_softcapping": 30.0,
    "global_head_dim": 512,
    "head_dim": 256,
    "hidden_activation": "gelu_pytorch_tanh",
    "hidden_size": 1536,
    "hidden_size_per_layer_input": 256,
    "initializer_range": 0.02,
    "intermediate_size": 6144,
    "layer_types": [
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "full_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "full_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "full_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "full_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "full_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "full_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "sliding_attention",
      "full_attention"
    ],
    "max_position_embeddings": 131072,
    "model_type": "gemma4_text",
    "num_attention_heads": 8,
    "num_experts": null,
    "num_global_key_value_heads": null,
    "num_hidden_layers": 35,
    "num_key_value_heads": 1,
    "num_kv_shared_layers": 20,
    "pad_token_id": 0,
    "rms_norm_eps": 1e-06,
    "rope_parameters": {
      "full_attention": {
        "partial_rotary_factor": 0.25,
        "rope_theta": 1000000.0,
        "rope_type": "proportional"
      },
      "sliding_attention": {
        "rope_theta": 10000.0,
        "rope_type": "default"
      }
    },
    "sliding_window": 512,
    "tie_word_embeddings": true,
    "top_k_experts": null,
    "use_bidirectional_attention": null,
    "use_cache": true,
    "use_double_wide_mlp": true,
    "vocab_size": 262144,
    "vocab_size_per_layer_input": 262144
  },
  "tie_word_embeddings": true,
  "transformers_version": "5.5.0.dev0",
  "video_token_id": 258884,
  "vision_config": {
    "_name_or_path": "",
    "architectures": null,
    "attention_bias": false,
    "attention_dropout": 0.0,
    "chunk_size_feed_forward": 0,
    "default_output_length": 280,
    "dtype": "bfloat16",
    "global_head_dim": 64,
    "head_dim": 64,
    "hidden_activation": "gelu_pytorch_tanh",
    "hidden_size": 768,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "is_encoder_decoder": false,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "max_position_embeddings": 131072,
    "model_type": "gemma4_vision",
    "num_attention_heads": 12,
    "num_hidden_layers": 16,
    "num_key_value_heads": 12,
    "output_attentions": false,
    "output_hidden_states": false,
    "patch_size": 16,
    "pooling_kernel_size": 3,
    "position_embedding_size": 10240,
    "problem_type": null,
    "return_dict": true,
    "rms_norm_eps": 1e-06,
    "rope_parameters": {
      "rope_theta": 100.0,
      "rope_type": "default"
    },
    "standardize": false,
    "use_clipped_linears": true
  },
  "vision_soft_tokens_per_image": 280
}