{
  "architectures": [
    "HfMoondream"
  ],
  "auto_map": {
    "AutoConfig": "hf_moondream.HfConfig",
    "AutoModelForCausalLM": "hf_moondream.HfMoondream"
  },
  "config": {},
  "model_type": "moondream1",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.51.1"
}