DannyJun committed on
Commit
ffa5117
·
verified ·
1 Parent(s): 1220231

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +7 -7
config.json CHANGED
@@ -20,11 +20,11 @@
20
  ]
21
  },
22
  "architectures": [
23
- "MolmoActForActionReasoning"
24
  ],
25
  "auto_map": {
26
- "AutoConfig": "configuration_molmoact.MolmoActConfig",
27
- "AutoModelForImageTextToText": "modeling_molmoact.MolmoActForActionReasoning"
28
  },
29
  "image_patch_id": 152066,
30
  "initializer_range": 0.02,
@@ -39,7 +39,7 @@
39
  "intermediate_size": 18944,
40
  "layer_norm_eps": 1e-06,
41
  "max_position_embeddings": 4096,
42
- "model_type": "molmoact_llm",
43
  "norm_after": false,
44
  "num_attention_heads": 28,
45
  "num_hidden_layers": 28,
@@ -53,10 +53,10 @@
53
  "use_qk_norm": false,
54
  "vocab_size": 152064
55
  },
56
- "model_type": "molmoact",
57
  "n_action_bins": 256,
58
  "norm_stats": {
59
- "molmoact": {
60
  "action": {
61
  "max": [
62
  0.06042003631591797,
@@ -263,7 +263,7 @@
263
  "initializer_range": 0.02,
264
  "intermediate_size": 4304,
265
  "layer_norm_eps": 1e-06,
266
- "model_type": "molmoact_vit",
267
  "num_attention_heads": 16,
268
  "num_hidden_layers": 27,
269
  "num_key_value_heads": 16,
 
20
  ]
21
  },
22
  "architectures": [
23
+ "SPRVLAForActionReasoning"
24
  ],
25
  "auto_map": {
26
+ "AutoConfig": "configuration_sprvla.SPRVLAConfig",
27
+ "AutoModelForImageTextToText": "modeling_sprvla.SPRVLAForActionReasoning"
28
  },
29
  "image_patch_id": 152066,
30
  "initializer_range": 0.02,
 
39
  "intermediate_size": 18944,
40
  "layer_norm_eps": 1e-06,
41
  "max_position_embeddings": 4096,
42
+ "model_type": "sprvla_llm",
43
  "norm_after": false,
44
  "num_attention_heads": 28,
45
  "num_hidden_layers": 28,
 
53
  "use_qk_norm": false,
54
  "vocab_size": 152064
55
  },
56
+ "model_type": "sprvla",
57
  "n_action_bins": 256,
58
  "norm_stats": {
59
+ "sprvla": {
60
  "action": {
61
  "max": [
62
  0.06042003631591797,
 
263
  "initializer_range": 0.02,
264
  "intermediate_size": 4304,
265
  "layer_norm_eps": 1e-06,
266
+ "model_type": "sprvla_vit",
267
  "num_attention_heads": 16,
268
  "num_hidden_layers": 27,
269
  "num_key_value_heads": 16,