Aryan215gupta committed on
Commit
08e1461
·
verified ·
1 Parent(s): 32c9747

Update moondream2/config.json

Browse files
Files changed (1) hide show
  1. moondream2/config.json +2 -21
moondream2/config.json CHANGED
@@ -6,27 +6,8 @@
6
  "AutoConfig": "hf_moondream.HfConfig",
7
  "AutoModelForCausalLM": "hf_moondream.HfMoondream"
8
  },
9
- "config": {
10
- "hidden_size": 4096,
11
- "num_hidden_layers": 32,
12
- "num_attention_heads": 32,
13
- "intermediate_size": 16384,
14
- "max_position_embeddings": 2048,
15
- "hidden_act": "gelu_new",
16
- "initializer_range": 0.02,
17
- "layer_norm_eps": 1e-5,
18
- "vocab_size": 51200,
19
- "attention_dropout": 0.0,
20
- "embd_pdrop": 0.0,
21
- "resid_pdrop": 0.0,
22
- "use_cache": true,
23
- "tie_word_embeddings": false,
24
- "num_key_value_heads": 32,
25
- "partial_rotary_factor": 0.5,
26
- "rope_theta": 10000.0,
27
- "qk_layernorm": false
28
- },
29
  "model_type": "moondream1",
30
  "torch_dtype": "bfloat16",
31
  "transformers_version": "4.52.4"
32
- }
 
6
  "AutoConfig": "hf_moondream.HfConfig",
7
  "AutoModelForCausalLM": "hf_moondream.HfMoondream"
8
  },
9
+ "config": {},
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
  "model_type": "moondream1",
11
  "torch_dtype": "bfloat16",
12
  "transformers_version": "4.52.4"
13
+ }