mjschock committed on
Commit
332a6f7
·
verified ·
1 Parent(s): 5281357

Upload config

Browse files
Files changed (1) hide show
  1. config.json +10 -30
config.json CHANGED
@@ -1,35 +1,15 @@
1
  {
2
  "auto_map": {
3
- "AutoConfig": "configuration_mobilevlm.MobileVLMConfig"
4
  },
5
- "freeze_mm_mlp_adapter": false,
6
- "hidden_act": "silu",
7
- "hidden_size": 2048,
8
- "image_aspect_ratio": "pad",
9
- "image_grid_pinpoints": null,
10
- "initializer_range": 0.02,
11
- "intermediate_size": 5632,
12
- "max_position_embeddings": 2048,
13
- "max_sequence_length": 2048,
14
- "mm_hidden_size": 1024,
15
- "mm_projector_type": "ldpnet",
16
- "mm_use_im_patch_token": false,
17
- "mm_use_im_start_end": false,
18
- "mm_vision_select_feature": "patch",
19
- "mm_vision_select_layer": -2,
20
- "mm_vision_tower": "openai/clip-vit-large-patch14-336",
21
- "model_type": "mobilevlm",
22
- "num_attention_heads": 16,
23
- "num_hidden_layers": 24,
24
- "num_key_value_heads": 16,
25
- "pretraining_tp": 1,
26
- "rms_norm_eps": 1e-06,
27
- "rope_scaling": null,
28
- "rope_theta": 10000.0,
29
  "transformers_version": "4.37.2",
30
- "tune_mm_mlp_adapter": false,
31
- "use_cache": true,
32
- "use_mm_proj": true,
33
- "vision_tower_type": "clip",
34
- "vocab_size": 32000
35
  }
 
1
  {
2
  "auto_map": {
3
+ "AutoConfig": "configuration_mamba.MambaConfig"
4
  },
5
+ "d_model": 768,
6
+ "fused_add_norm": true,
7
+ "model_type": "mamba",
8
+ "n_layer": 24,
9
+ "pad_vocab_size_multiple": 8,
10
+ "residual_in_fp32": true,
11
+ "rms_norm": true,
12
+ "ssm_cfg": {},
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  "transformers_version": "4.37.2",
14
+ "vocab_size": 50277
 
 
 
 
15
  }