{
  "action_dim": 7,
  "action_model_type": "DiT-B",
  "data_root_dir": "/scr/user/data",
  "debug": false,
  "future_action_window_size": 15,
  "hf_token": "hf_token",
  "image_aug": true,
  "is_resume": false,
  "load_all_data_for_training": true,
  "past_action_window_size": 3,
  "pretrained_checkpoint": "path/to/ckpt",
  "repeated_diffusion_steps": 4,
  "resume_epoch": null,
  "resume_step": null,
  "run_id": "step3_HIS_Sil_prism-qwen25-dinosiglip-224px+bridge_rt_1+diffusion+n8+b16+x42--image_aug",
  "run_id_note": null,
  "run_root_dir": "outputs/step3_HIS_ditb_8_lr2e-5_b16_fa15_pa0_shuffle_bridge_rt_1",
  "save_interval": 2500,
  "seed": 42,
  "trackers": [
    "jsonl",
    "wandb"
  ],
  "use_ema": false,
  "vla": {
    "action_tokenizer": "extra_action_tokenizer",
    "base_vlm": "prism-qwen25-extra-dinosiglip-224px+0_5b",
    "data_mix": "bridge_rt_1",
    "enable_gradient_checkpointing": true,
    "enable_mixed_precision_training": true,
    "epochs": 100,
    "expected_world_size": 32,
    "freeze_llm_backbone": false,
    "freeze_vision_backbone": false,
    "global_batch_size": 1024,
    "learning_rate": 4e-05,
    "lr_scheduler_type": "constant",
    "max_grad_norm": 1.0,
    "max_steps": null,
    "per_device_batch_size": 32,
    "reduce_in_full_precision": true,
    "shuffle_buffer_size": 250000,
    "train_strategy": "fsdp-full-shard",
    "type": "prism-qwen25-dinosiglip-224px+0_5b",
    "unfreeze_last_llm_layer": false,
    "vla_id": "prism-qwen25-dinosiglip-224px+0_5b",
    "warmup_ratio": 0.0,
    "weight_decay": 0.0
  },
  "wandb_entity": "",
  "wandb_project": ""
}