{
"architectures": [
"HfMoondream"
],
"auto_map": {
"AutoConfig": "hf_moondream.HfConfig",
"AutoModelForCausalLM": "hf_moondream.HfMoondream"
},
"config": {},
"model_type": "moondream1",
"torch_dtype": "bfloat16",
"transformers_version": "4.52.4"
}