vlm-1b-base / config.json
{
  "freeze_image_encoder": true,
  "image_model_name": "google/siglip2-so400m-patch16-256",
  "llm_model_name": "Qwen/Qwen3-0.6B",
  "lora_attn_implementation": "sdpa",
  "lora_dropout": 0.1,
  "lora_hidden_size": 768,
  "lora_intermediate_size": 3072,
  "lora_mlp_ratio": 4,
  "lora_num_attention_heads": 16,
  "lora_num_layers": 16,
  "lora_rank": 16,
  "lora_target": "qkvm",
  "lora_use_prefix": true,
  "model_type": "vision_lora",
  "prompt_aware": false,
  "text_model_name": null,
  "transformers_version": "4.57.3"
}
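This config describes a custom "vision_lora" model that pairs a frozen SigLIP 2 image encoder with a Qwen3-0.6B language model; the lora_* fields presumably parameterize adapter layers defined by the repo's own modeling code. Below is a minimal sketch of reading the config and loading the two referenced backbones with transformers. The local path "config.json" and the use of AutoModel / AutoModelForCausalLM are assumptions: since "vision_lora" is not a built-in transformers model type, AutoConfig/AutoModel cannot build the combined model directly, and the repo's actual loading code may differ.

import json

from transformers import AutoModel, AutoModelForCausalLM

# Read the raw config; the "config.json" path is an assumption.
with open("config.json") as f:
    cfg = json.load(f)

# "vision_lora" is a custom model_type, so load the two referenced
# backbones individually rather than through AutoConfig.
image_encoder = AutoModel.from_pretrained(cfg["image_model_name"])
llm = AutoModelForCausalLM.from_pretrained(cfg["llm_model_name"])

# The config asks for a frozen image encoder during training.
if cfg["freeze_image_encoder"]:
    for p in image_encoder.parameters():
        p.requires_grad = False

print(cfg["model_type"], cfg["lora_rank"], cfg["lora_target"])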