{
  "model_name": "models--google--gemma-3-4b-it",
  "model_type": "vlm-gemma3",
  "vm_cfg": {
    "model_type": "siglip_vision_model",
    "arch": "siglip",
    "image_size": 896,
    "patch_size": 14,
    "cls_embed": false,
    "hidden_size": 1152,
    "intermediate_size": 4304,
    "num_attention_heads": 16,
    "num_hidden_layers": 27,
    "hidden_act": "gelu_pytorch_tanh",
    "layer_norm_eps": 1e-06
  },
  "mm_cfg": {
    "num_layers": 1,
    "hidden_act": "none",
    "mm_tokens_per_image": 256,
    "proj_dim": 2560
  },
  "lm_cfg": {
    "model_type": "gemma3_text",
    "data_type": "bfloat16",
    "arch": "gemma",
    "gen": "3",
    "size": "4b",
    "token_cfg": {
      "vocab_size": 262208
    },
    "rope_cfg": {
      "rope_theta": 1000000,
      "rope_local_base_freq": 10000,
      "rope_scaling": {
        "factor": 8.0,
        "low_freq_factor": 0,
        "high_freq_factor": 0,
        "original_max_position_embeddings": 0,
        "long_factor": null,
        "short_factor": null,
        "rope_type": "linear"
      }
    },
    "attn_cfg": {
      "num_attention_heads": 8,
      "num_key_value_heads": 4,
      "head_dim": 256,
      "swa_enable": true,
      "swa_ratio": 5,
      "sliding_window": 1024,
      "attention_bias": false,
      "attention_dropout": 0.0,
      "query_pre_attn_scalar": 256
    },
    "mlp_cfg": {
      "intermediate_size": 10240,
      "act": "gelu_tanh",
      "num_layers": 3,
      "mlp_bias": false
    },
    "hidden_size": 2560,
    "num_hidden_layers": 34,
    "max_position_embeddings": 2048,
    "rms_norm_eps": 1e-06,
    "rms_norm_unit_offset": true,
    "layer_norms": ["pre_attn", "post_attn", "pre_ffn", "post_ffn", "qk_norm"],
    "attn_logit_softcapping": null,
    "final_logit_softcapping": null,
    "lm_head_num_splits": 5,
    "lm_head_split_dim": 52448
  },
  "pipeline_cfg": {
    "system_prompt": null,
    "chat_template": null,
    "max_num_tokens": 2048,
    "input_token_group_size": 128,
    "input_token_group_offsets": [0, 128, 256, 384, 512, 640, 768, 896, 1024, 1152, 1280, 1408, 1536, 1664, 1792, 1920],
    "future_token_mask_size": 128,
    "return_logits": false,
    "use_strided_kv_cache": false,
    "enable_filter_sharing": true
  },
  "language_model_name": "models--google--gemma-3-4b-it_language",
  "vision_model_name": "models--google--gemma-3-4b-it_vision"
}