config.json (244 Bytes, commit 9cb4953):
{
  "model_type": "llama",
  "architectures": [
    "MllamaForCausalLM"
  ],
  "torch_dtype": "float16",
  "text_config": {
    "model_type": "llama"
  },
  "vision_config": {
    "model_type": "clip_vision_model",
    "hidden_size": 1024
  }
}
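
This config nests separate text and vision sub-configs while keeping a top-level "model_type" of "llama", even though the listed architecture is MllamaForCausalLM. A minimal sketch of how to inspect it, assuming the JSON above is saved as ./model/config.json (the ./model path is a hypothetical placeholder) and that the transformers library is installed:

# Minimal sketch: read the raw JSON, then let transformers resolve it.
# Assumes the config shown above is saved at ./model/config.json
# (the ./model path is a hypothetical placeholder).
import json

from transformers import AutoConfig

# Read the file directly to see the nested text/vision sub-configs.
with open("./model/config.json") as f:
    raw = json.load(f)

print(raw["architectures"])                 # ['MllamaForCausalLM']
print(raw["text_config"]["model_type"])     # 'llama'
print(raw["vision_config"]["hidden_size"])  # 1024

# AutoConfig dispatches on the top-level "model_type" field, so this
# resolves to whatever config class is registered for "llama".
config = AutoConfig.from_pretrained("./model")
print(type(config).__name__)

Because AutoConfig keys off "model_type" rather than "architectures", the top-level value of "llama" here determines which configuration class is instantiated; the nested text_config and vision_config blocks are carried along as data.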