{
    "dataset": {
        "repo_id": "Sakits/so101_sorting_pickplace_stacking_20260219",
        "root": null,
        "episodes": null,
        "image_transforms": {
            "enable": false,
            "max_num_transforms": 3,
            "random_order": false,
            "tfs": {
                "brightness": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "brightness": [
                            0.8,
                            1.2
                        ]
                    }
                },
                "contrast": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "contrast": [
                            0.8,
                            1.2
                        ]
                    }
                },
                "saturation": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "saturation": [
                            0.5,
                            1.5
                        ]
                    }
                },
                "hue": {
                    "weight": 1.0,
                    "type": "ColorJitter",
                    "kwargs": {
                        "hue": [
                            -0.05,
                            0.05
                        ]
                    }
                },
                "sharpness": {
                    "weight": 1.0,
                    "type": "SharpnessJitter",
                    "kwargs": {
                        "sharpness": [
                            0.5,
                            1.5
                        ]
                    }
                },
                "affine": {
                    "weight": 1.0,
                    "type": "RandomAffine",
                    "kwargs": {
                        "degrees": [
                            -5.0,
                            5.0
                        ],
                        "translate": [
                            0.05,
                            0.05
                        ]
                    }
                }
            }
        },
        "revision": null,
        "use_imagenet_stats": true,
        "video_backend": "torchcodec",
        "streaming": false
    },
    "env": null,
    "policy": {
        "type": "groot",
        "n_obs_steps": 1,
        "input_features": {
            "observation.state": {
                "type": "STATE",
                "shape": [
                    6
                ]
            },
            "observation.images.wrist": {
                "type": "VISUAL",
                "shape": [
                    3,
                    480,
                    640
                ]
            }
        },
        "output_features": {
            "action": {
                "type": "ACTION",
                "shape": [
                    6
                ]
            }
        },
        "device": "cuda",
        "use_amp": false,
        "push_to_hub": false,
        "repo_id": null,
        "private": null,
        "tags": null,
        "license": null,
        "pretrained_path": "nvidia/GR00T-N1.5-3B",
        "base_model_path": "nvidia/GR00T-N1.5-3B",
        "eagle_path": null,
        "tokenizer_assets_repo": "lerobot/eagle2hg-processor-groot-n1p5",
        "tune_llm": true,
        "tune_visual": true,
        "eagle_select_layer": 12,
        "eagle_project_to_dim": null,
        "tune_projector": true,
        "tune_diffusion_model": true,
        "chunk_size": 16,
        "n_action_steps": 16,
        "max_state_dim": 64,
        "max_action_dim": 32,
        "noise_beta_alpha": 1.5,
        "noise_beta_beta": 1.0,
        "noise_s": 0.999,
        "num_timestep_buckets": 1000,
        "num_inference_steps": 4,
        "max_num_embodiments": 32,
        "embodiment_tag": "new_embodiment",
        "action_head_hidden_size": 1024,
        "action_head_input_embedding_dim": 1536,
        "action_head_backbone_embedding_dim": 2048,
        "add_pos_embed": true,
        "max_seq_len": 1024,
        "num_target_vision_tokens": 32,
        "use_vlln": true,
        "diffusion_model_cfg": {
            "attention_head_dim": 48,
            "cross_attention_dim": 2048,
            "dropout": 0.2,
            "final_dropout": true,
            "interleave_self_attention": true,
            "norm_type": "ada_norm",
            "num_attention_heads": 32,
            "num_layers": 16,
            "output_dim": 1024,
            "positional_embeddings": null
        },
        "vl_self_attention_cfg": {
            "attention_head_dim": 64,
            "dropout": 0.2,
            "final_dropout": true,
            "num_attention_heads": 32,
            "num_layers": 4,
            "positional_embeddings": null
        },
        "image_size": [
            448,
            448
        ],
        "tokenizer_max_length": 200,
        "use_bf16": true,
        "compile_model": false,
        "compile_mode": "max-autotune",
        "dtype": "bfloat16",
        "normalization_mapping": {
            "VISUAL": "IDENTITY",
            "STATE": "MEAN_STD",
            "ACTION": "MEAN_STD"
        },
        "optimizer_lr": 0.0001,
        "optimizer_betas": [
            0.95,
            0.999
        ],
        "optimizer_eps": 1e-08,
        "optimizer_weight_decay": 1e-05,
        "optimizer_grad_clip_norm": 1.0,
        "scheduler_warmup_steps": 500,
        "scheduler_decay_steps": 10000,
        "scheduler_decay_lr": 1e-05
    },
    "output_dir": "outputs/train/groot_async8_sorting_pickplace_stacking_20260224",
    "job_name": "groot_async8_sorting_pickplace_stacking_20260224",
    "resume": false,
    "seed": 1000,
    "num_workers": 4,
    "batch_size": 32,
    "steps": 50000,
    "eval_freq": 20000,
    "log_freq": 200,
    "save_checkpoint": true,
    "save_freq": 10000,
    "use_policy_training_preset": false,
    "optimizer": {
        "type": "adamw",
        "lr": 5e-05,
        "weight_decay": 1e-10,
        "grad_clip_norm": 10.0,
        "betas": [
            0.9,
            0.95
        ],
        "eps": 1e-08
    },
    "scheduler": {
        "type": "cosine_decay_with_warmup",
        "num_warmup_steps": 1000,
        "num_decay_steps": 50000,
        "peak_lr": 5e-05,
        "decay_lr": 2.5e-06
    },
    "eval": {
        "n_episodes": 50,
        "batch_size": 50,
        "use_async_envs": false
    },
    "wandb": {
        "enable": true,
        "disable_artifact": true,
        "project": "vlash",
        "entity": null,
        "notes": null,
        "run_id": "021ti8g6",
        "mode": null
    },
    "checkpoint_path": null,
    "rename_map": {},
    "max_delay_steps": 8,
    "grad_accum_steps": 1,
    "shared_observation": false,
    "lora": {
        "enable": false,
        "backend": "peft",
        "r": 16,
        "alpha": 16,
        "dropout": 0.0,
        "extra_trainable_modules": [],
        "target_modules": [
            "q_proj",
            "k_proj",
            "v_proj",
            "o_proj",
            "up_proj",
            "down_proj",
            "gate_proj"
        ],
        "use_qlora": false,
        "qlora_quant_type": "nf4",
        "qlora_compute_dtype": "bfloat16"
    }
}
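
For quick inspection, a minimal sketch (plain Python, standard library only) of loading this config and printing the settings that govern the run. The filename train_config.json is an assumption, as is the reading that with "use_policy_training_preset": false the top-level "optimizer" and "scheduler" blocks, rather than the policy's optimizer_* fields, are the ones that take effect.

# Minimal sketch: inspect the effective hyperparameters in this training config.
# Assumption: the file is saved locally as "train_config.json" (hypothetical name).
import json

with open("train_config.json") as f:
    cfg = json.load(f)

# Dataset and policy identity
print("dataset:", cfg["dataset"]["repo_id"])
print("policy:", cfg["policy"]["type"], "| base model:", cfg["policy"]["base_model_path"])

# Action chunking and observation setup
print("chunk_size:", cfg["policy"]["chunk_size"],
      "| n_action_steps:", cfg["policy"]["n_action_steps"])
print("inputs:", list(cfg["policy"]["input_features"]))

# Optimization settings for this run (assuming the preset is disabled,
# so the top-level blocks apply rather than the policy's optimizer_* fields)
opt, sched = cfg["optimizer"], cfg["scheduler"]
print(f"{opt['type']}  lr={opt['lr']}  wd={opt['weight_decay']}  clip={opt['grad_clip_norm']}")
print(f"{sched['type']}  warmup={sched['num_warmup_steps']}  "
      f"decay_steps={sched['num_decay_steps']}  decay_lr={sched['decay_lr']}")
print("batch_size:", cfg["batch_size"], "| steps:", cfg["steps"],
      "| save_freq:", cfg["save_freq"])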