Yinpei Dai committed
Commit 3982df3
1 Parent(s): 5c82a83
Files changed (3)
  1. config.json +59 -0
  2. config.yaml +0 -0
  3. dataset_statistics.json +127 -0
config.json ADDED
@@ -0,0 +1,59 @@
+{
+  "action_dim": 7,
+  "action_model_type": "DiT-L",
+  "consolidate_type": "tome",
+  "data_root_dir": "data",
+  "dataloader_type": "group",
+  "fusion_type": "gate",
+  "future_action_window_size": 15,
+  "group_size": 16,
+  "hf_token": "YOUR_HF_TOKEN",
+  "image_aug": true,
+  "is_resume": false,
+  "load_all_data_for_training": true,
+  "mem_length": 16,
+  "per_token_size": 256,
+  "pretrained_checkpoint": "./pretrained/openvla-7b-prismatic/checkpoints/step-295000-epoch-40-loss=0.2200.pt",
+  "repeated_diffusion_steps": 4,
+  "resume_epoch": 0,
+  "resume_step": 0,
+  "retrieval_layers": 2,
+  "run_id": "memvla_robomme--image_aug",
+  "run_id_note": null,
+  "run_root_dir": "log/robomme",
+  "save_interval": 10000,
+  "seed": 42,
+  "trackers": [
+    "jsonl",
+    "wandb"
+  ],
+  "update_fused": false,
+  "use_ema": false,
+  "use_timestep_pe": true,
+  "vla": {
+    "base_vlm": "prism-dinosiglip-224px+7b",
+    "data_mix": "robomme",
+    "enable_gradient_checkpointing": true,
+    "enable_mixed_precision_training": true,
+    "epochs": 100,
+    "expected_world_size": 2,
+    "freeze_llm_backbone": false,
+    "freeze_vision_backbone": false,
+    "global_batch_size": 64,
+    "learning_rate": 2e-05,
+    "lr_scheduler_type": "constant",
+    "max_grad_norm": 1.0,
+    "max_steps": 80500,
+    "per_device_batch_size": 32,
+    "reduce_in_full_precision": true,
+    "shuffle_buffer_size": 32000,
+    "train_strategy": "fsdp-full-shard",
+    "type": "prism-dinosiglip-224px+oxe+diffusion",
+    "unfreeze_last_llm_layer": false,
+    "vla_id": "prism-dinosiglip-224px+oxe+diffusion",
+    "warmup_ratio": 0.0,
+    "weight_decay": 0.0
+  },
+  "wandb_entity": "YOUR_WANDB_ENTITY",
+  "wandb_project": "memvla"
+}
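
Not part of the commit: a minimal sketch of how a training or inference script might read this config.json. The `load_config` helper and the usage below are illustrative assumptions, not code from this repository.

```python
import json
from pathlib import Path


def load_config(path: str) -> dict:
    """Load a run config like the config.json added above (illustrative helper)."""
    with Path(path).open() as f:
        return json.load(f)


# Hypothetical usage against this commit's file.
cfg = load_config("config.json")
print(cfg["action_model_type"])         # "DiT-L"
print(cfg["vla"]["global_batch_size"])  # 64
assert cfg["action_dim"] == 7           # 7-dim actions, matching dataset_statistics.json below
```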
config.yaml ADDED
Binary file (1.39 kB)
 
dataset_statistics.json ADDED
@@ -0,0 +1,127 @@
+{
+  "robomme": {
+    "action": {
+      "mean": [
+        -0.00016778719145804644,
+        9.925549966283143e-05,
+        -0.0016603461699560285,
+        -7.846341759432107e-06,
+        0.0004085838154423982,
+        4.4880340283270925e-05,
+        0.13325271010398865
+      ],
+      "std": [
+        0.003973749000579119,
+        0.007152142468839884,
+        0.008023738861083984,
+        0.0014014379121363163,
+        0.00410428736358881,
+        0.011411894112825394,
+        0.9796823859214783
+      ],
+      "max": [
+        0.04991923272609711,
+        0.18060597777366638,
+        0.023674413561820984,
+        0.059715017676353455,
+        0.149556502699852,
+        0.22754450142383575,
+        1.3819525241851807
+      ],
+      "min": [
+        -0.041799940168857574,
+        -0.08721448481082916,
+        -0.04533028230071068,
+        -0.06907561421394348,
+        -0.06398998200893402,
+        -0.10732872784137726,
+        -1.0
+      ],
+      "q01": [
+        -0.012237973958253861,
+        -0.019052517712116242,
+        -0.022844786643981933,
+        -0.002837904579937458,
+        -0.0037610567267984153,
+        -0.04168713182210922,
+        -1.0
+      ],
+      "q99": [
+        0.012791256010532373,
+        0.019526026248931884,
+        0.01747122585773464,
+        0.002798897372558692,
+        0.01629937455058075,
+        0.04205296590924247,
+        1.1190142774581842
+      ],
+      "mask": [
+        true,
+        true,
+        true,
+        true,
+        true,
+        true,
+        false
+      ]
+    },
+    "proprio": {
+      "mean": [
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "std": [
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "max": [
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "min": [
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "q01": [
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "q99": [
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ]
+    },
+    "num_transitions": 769157,
+    "num_trajectories": 1600
+  }
+}
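
Not part of the commit: a short sketch of how per-dimension statistics like these are typically consumed in OpenVLA-style pipelines, mapping each unmasked action dimension to [-1, 1] via its 1st/99th percentiles (q01/q99) and leaving masked-out dimensions (here the last one, presumably the gripper) untouched. The `normalize_action` helper is an assumption for illustration, not code from this repository.

```python
import json

import numpy as np


def normalize_action(action: np.ndarray, stats: dict) -> np.ndarray:
    """Quantile-normalize an action vector to [-1, 1] per dimension.

    Dimensions where stats["mask"] is false are passed through unchanged.
    """
    q01 = np.asarray(stats["q01"])
    q99 = np.asarray(stats["q99"])
    mask = np.asarray(stats["mask"])
    scaled = 2.0 * (action - q01) / (q99 - q01 + 1e-8) - 1.0
    return np.where(mask, np.clip(scaled, -1.0, 1.0), action)


with open("dataset_statistics.json") as f:
    stats = json.load(f)["robomme"]["action"]

a = np.zeros(7)  # dummy 7-dim action
print(normalize_action(a, stats))
```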