lemmy / config.json
Commit b3bba13 (verified): Upload config.json with huggingface_hub
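For context, here is a minimal sketch of how a file like this is typically pushed to the Hub with huggingface_hub, the library named in the commit message above. The repo id, local path, and token handling are placeholders for illustration, not details taken from this page.

from huggingface_hub import HfApi

api = HfApi()  # picks up the token from HF_TOKEN or a previous `huggingface-cli login`
api.upload_file(
    path_or_fileobj="config.json",   # local file to send
    path_in_repo="config.json",      # destination path inside the repo
    repo_id="your-username/lemmy",   # hypothetical repo id
    commit_message="Upload config.json with huggingface_hub",
)

The uploaded config.json itself follows.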
{
"architectures": [
"Gemma4ForConditionalGeneration"
],
"audio_config": null,
"audio_token_id": 258881,
"boa_token_id": 256000,
"boi_token_id": 255999,
"dtype": "bfloat16",
"eoa_token_id": 258883,
"eoa_token_index": 258883,
"eoi_token_id": 258882,
"eos_token_id": [
1,
106,
50
],
"image_token_id": 258880,
"initializer_range": 0.02,
"model_type": "gemma4",
"quantization": {
"group_size": 64,
"bits": 4,
"mode": "affine",
"language_model.model.layers.0.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.1.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.2.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.3.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.4.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.5.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.6.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.7.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.8.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.9.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.10.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.11.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.12.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.13.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.14.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.15.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.16.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.17.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.18.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.19.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.20.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.21.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.22.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.23.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.24.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.25.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.26.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.27.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.28.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.29.router.proj": {
"group_size": 64,
"bits": 8
}
},
"quantization_config": {
"group_size": 64,
"bits": 4,
"mode": "affine",
"language_model.model.layers.0.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.1.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.2.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.3.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.4.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.5.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.6.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.7.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.8.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.9.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.10.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.11.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.12.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.13.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.14.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.15.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.16.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.17.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.18.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.19.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.20.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.21.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.22.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.23.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.24.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.25.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.26.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.27.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.28.router.proj": {
"group_size": 64,
"bits": 8
},
"language_model.model.layers.29.router.proj": {
"group_size": 64,
"bits": 8
}
},
"text_config": {
"attention_bias": false,
"attention_dropout": 0.0,
"attention_k_eq_v": true,
"bos_token_id": 2,
"dtype": "bfloat16",
"enable_moe_block": true,
"eos_token_id": 1,
"final_logit_softcapping": 30.0,
"global_head_dim": 512,
"head_dim": 256,
"hidden_activation": "gelu_pytorch_tanh",
"hidden_size": 2816,
"hidden_size_per_layer_input": 0,
"initializer_range": 0.02,
"intermediate_size": 2112,
"layer_types": [
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"sliding_attention",
"full_attention"
],
"max_position_embeddings": 262144,
"model_type": "gemma4_text",
"moe_intermediate_size": 704,
"num_attention_heads": 16,
"num_experts": 128,
"num_global_key_value_heads": 2,
"num_hidden_layers": 30,
"num_key_value_heads": 8,
"num_kv_shared_layers": 0,
"pad_token_id": 0,
"rms_norm_eps": 1e-06,
"rope_parameters": {
"full_attention": {
"partial_rotary_factor": 0.25,
"rope_theta": 1000000.0,
"rope_type": "proportional"
},
"sliding_attention": {
"rope_theta": 10000.0,
"rope_type": "default"
}
},
"sliding_window": 1024,
"tie_word_embeddings": true,
"top_k_experts": 8,
"use_bidirectional_attention": "vision",
"use_cache": true,
"use_double_wide_mlp": false,
"vocab_size": 262144,
"vocab_size_per_layer_input": 262144
},
"tie_word_embeddings": true,
"transformers_version": "5.5.0.dev0",
"video_token_id": 258884,
"vision_soft_tokens_per_image": 280
}
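Since the file is plain JSON, its fields can be inspected without any model code. Below is a minimal sketch, assuming the file is saved locally as config.json; it only reads keys that appear verbatim above and prints a short summary of the MoE, attention, and quantization settings. Note that the quantization and quantization_config blocks carry identical contents, so either one can be read.

import json
from collections import Counter

with open("config.json") as f:
    cfg = json.load(f)

text = cfg["text_config"]
print("hidden layers:       ", text["num_hidden_layers"])        # 30
print("experts per layer:   ", text["num_experts"])               # 128
print("experts per token:   ", text["top_k_experts"])             # 8
print("attention layout:    ", Counter(text["layer_types"]))      # 25 sliding, 5 full
print("context length:      ", text["max_position_embeddings"])   # 262144

# Default quantization is 4-bit, affine, group size 64; the per-layer
# entries override the MoE router projections to 8-bit.
quant = cfg["quantization"]
router_overrides = [k for k in quant if k.startswith("language_model.")]
print("8-bit router layers: ", len(router_overrides))              # 30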