NiryoTeam committed on
Commit
8c54c4f
·
verified ·
1 Parent(s): 4591a68

Create config.json

Browse files
Files changed (1) hide show
  1. config.json +87 -0
config.json ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "type": "smolvla",
3
+ "n_obs_steps": 1,
4
+ "normalization_mapping": {
5
+ "VISUAL": "IDENTITY",
6
+ "STATE": "MEAN_STD",
7
+ "ACTION": "MEAN_STD"
8
+ },
9
+ "input_features": {
10
+ "observation.state": {
11
+ "type": "STATE",
12
+ "shape": [
13
+ 6
14
+ ]
15
+ },
16
+ "observation.image2": {
17
+ "type": "VISUAL",
18
+ "shape": [
19
+ 3,
20
+ 256,
21
+ 256
22
+ ]
23
+ },
24
+ "observation.image": {
25
+ "type": "VISUAL",
26
+ "shape": [
27
+ 3,
28
+ 256,
29
+ 256
30
+ ]
31
+ },
32
+ "observation.image3": {
33
+ "type": "VISUAL",
34
+ "shape": [
35
+ 3,
36
+ 256,
37
+ 256
38
+ ]
39
+ }
40
+ },
41
+ "output_features": {
42
+ "action": {
43
+ "type": "ACTION",
44
+ "shape": [
45
+ 6
46
+ ]
47
+ }
48
+ },
49
+ "chunk_size": 50,
50
+ "n_action_steps": 50,
51
+ "max_state_dim": 32,
52
+ "max_action_dim": 32,
53
+ "resize_imgs_with_padding": [
54
+ 512,
55
+ 512
56
+ ],
57
+ "empty_cameras": 0,
58
+ "adapt_to_pi_aloha": false,
59
+ "use_delta_joint_actions_aloha": false,
60
+ "tokenizer_max_length": 48,
61
+ "num_steps": 10,
62
+ "use_cache": true,
63
+ "freeze_vision_encoder": true,
64
+ "train_expert_only": true,
65
+ "train_state_proj": true,
66
+ "optimizer_lr": 0.0001,
67
+ "optimizer_betas": [
68
+ 0.9,
69
+ 0.95
70
+ ],
71
+ "optimizer_eps": 1e-08,
72
+ "optimizer_weight_decay": 1e-10,
73
+ "optimizer_grad_clip_norm": 10,
74
+ "scheduler_warmup_steps": 1000,
75
+ "scheduler_decay_steps": 30000,
76
+ "scheduler_decay_lr": 2.5e-06,
77
+ "vlm_model_name": "HuggingFaceTB/SmolVLM2-500M-Video-Instruct",
78
+ "load_vlm_weights": true,
79
+ "attention_mode": "cross_attn",
80
+ "prefix_length": 0,
81
+ "pad_language_to": "max_length",
82
+ "num_expert_layers": 0,
83
+ "num_vlm_layers": 16,
84
+ "self_attn_every_n_layers": 2,
85
+ "expert_width_multiplier": 0.75
86
+ }
87
+