{
    "dataset": {
        "repo_id": "Sakits/so101_sorting_pickplace_stacking_20260219",
        "root": null,
        "episodes": null,
        "image_transforms": {
            "enable": false,
            "max_num_transforms": 3,
            "random_order": false,
            "tfs": {
                "brightness": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "brightness": [
                            0.8,
                            1.2
                        ]
                    }
                },
                "contrast": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "contrast": [
                            0.8,
                            1.2
                        ]
                    }
                },
                "saturation": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "saturation": [
                            0.5,
                            1.5
                        ]
                    }
                },
                "hue": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "hue": [
                            -0.05,
                            0.05
                        ]
                    }
                },
                "sharpness": {
                    "weight": 1.0,
                    "type": "SharpnessJitter",
                    "kwargs": {
                        "sharpness": [
                            0.5,
                            1.5
                        ]
                    }
                },
                "affine": {
                    "weight": 1.0,
                    "type": "RandomAffine",
                    "kwargs": {
                        "degrees": [
                            -5.0,
                            5.0
                        ],
                        "translate": [
                            0.05,
                            0.05
                        ]
                    }
                }
            }
        },
        "revision": null,
        "use_imagenet_stats": true,
        "video_backend": "torchcodec",
        "streaming": false
    },
    "env": null,
    "policy": {
        "type": "groot",
        "n_obs_steps": 1,
        "input_features": {
            "observation.state": {
                "type": "STATE",
                "shape": [
                    6
                ]
            },
            "observation.images.wrist": {
                "type": "VISUAL",
                "shape": [
                    3,
                    480,
                    640
                ]
            }
        },
        "output_features": {
            "action": {
                "type": "ACTION",
                "shape": [
                    6
                ]
            }
        },
        "device": "cuda",
        "use_amp": false,
        "push_to_hub": false,
        "repo_id": null,
        "private": null,
        "tags": null,
        "license": null,
        "pretrained_path": "nvidia/GR00T-N1.5-3B",
        "base_model_path": "nvidia/GR00T-N1.5-3B",
        "eagle_path": null,
        "tokenizer_assets_repo": "lerobot/eagle2hg-processor-groot-n1p5",
        "tune_llm": true,
        "tune_visual": true,
        "eagle_select_layer": 12,
        "eagle_project_to_dim": null,
        "tune_projector": true,
        "tune_diffusion_model": true,
        "chunk_size": 16,
        "n_action_steps": 16,
        "max_state_dim": 64,
        "max_action_dim": 32,
        "noise_beta_alpha": 1.5,
        "noise_beta_beta": 1.0,
        "noise_s": 0.999,
        "num_timestep_buckets": 1000,
        "num_inference_steps": 4,
        "max_num_embodiments": 32,
        "embodiment_tag": "new_embodiment",
        "action_head_hidden_size": 1024,
        "action_head_input_embedding_dim": 1536,
        "action_head_backbone_embedding_dim": 2048,
        "add_pos_embed": true,
        "max_seq_len": 1024,
        "num_target_vision_tokens": 32,
        "use_vlln": true,
        "diffusion_model_cfg": {
            "attention_head_dim": 48,
            "cross_attention_dim": 2048,
            "dropout": 0.2,
            "final_dropout": true,
            "interleave_self_attention": true,
            "norm_type": "ada_norm",
            "num_attention_heads": 32,
            "num_layers": 16,
            "output_dim": 1024,
            "positional_embeddings": null
        },
        "vl_self_attention_cfg": {
            "attention_head_dim": 64,
            "dropout": 0.2,
            "final_dropout": true,
            "num_attention_heads": 32,
            "num_layers": 4,
            "positional_embeddings": null
        },
        "image_size": [
            448,
            448
        ],
        "tokenizer_max_length": 200,
        "use_bf16": true,
        "compile_model": false,
        "compile_mode": "max-autotune",
        "dtype": "bfloat16",
        "normalization_mapping": {
            "VISUAL": "IDENTITY",
            "STATE": "MEAN_STD",
            "ACTION": "MEAN_STD"
        },
        "optimizer_lr": 0.0001,
        "optimizer_betas": [
            0.95,
            0.999
        ],
        "optimizer_eps": 1e-08,
        "optimizer_weight_decay": 1e-05,
        "optimizer_grad_clip_norm": 1.0,
        "scheduler_warmup_steps": 500,
        "scheduler_decay_steps": 10000,
        "scheduler_decay_lr": 1e-05
    },
    "output_dir": "outputs/train/groot_async8_sorting_pickplace_stacking_20260225",
    "job_name": "groot_async8_sorting_pickplace_stacking_20260225",
    "resume": false,
    "seed": 1000,
    "num_workers": 4,
    "batch_size": 32,
    "steps": 50000,
    "eval_freq": 20000,
    "log_freq": 200,
    "save_checkpoint": true,
    "save_freq": 10000,
    "use_policy_training_preset": false,
    "optimizer": {
        "type": "adamw",
        "lr": 5e-05,
        "weight_decay": 1e-10,
        "grad_clip_norm": 10.0,
        "betas": [
            0.9,
            0.95
        ],
        "eps": 1e-08
    },
    "scheduler": {
        "type": "cosine_decay_with_warmup",
        "num_warmup_steps": 1000,
        "num_decay_steps": 50000,
        "peak_lr": 5e-05,
        "decay_lr": 2.5e-06
    },
    "eval": {
        "n_episodes": 50,
        "batch_size": 50,
        "use_async_envs": false
    },
    "wandb": {
        "enable": true,
        "disable_artifact": true,
        "project": "vlash",
        "entity": null,
        "notes": null,
        "run_id": "joapvbm9",
        "mode": null
    },
    "checkpoint_path": null,
    "rename_map": {},
    "max_delay_steps": 0,
    "grad_accum_steps": 1,
    "shared_observation": false,
    "lora": {
        "enable": false,
        "backend": "peft",
        "r": 16,
        "alpha": 16,
        "dropout": 0.0,
        "extra_trainable_modules": [],
        "target_modules": [
            "q_proj",
            "k_proj",
            "v_proj",
            "o_proj",
            "up_proj",
            "down_proj",
            "gate_proj"
        ],
        "use_qlora": false,
        "qlora_quant_type": "nf4",
        "qlora_compute_dtype": "bfloat16"
    }
}