{
"data": {
"exp_name": null,
"robot": "epiclab_franka",
"proprio_len": 2,
"action_len": 4,
"action_dim": 7,
"goal_dim": 6,
"action_rel_len": 0,
"dt_steps": 3,
"tokenizer": null,
"image_transform": null,
"action_token_num": 256,
"img_steps": 1,
"img_key": [
"front",
"side"
],
"image_size": null,
"anything_prob": 0.2,
"robot_rep": "xyz_rpy",
"goal_rep": "xyz_rpy",
"tokenizer_type": "ratio_min_max_uniform",
"tokenizer_ratio_limit": 0.01,
"count_num": 10000,
"trans_noise": 0.01,
"rot_noise": 0.05,
"aug_img_config": {
"brightness": 0.2,
"contrast": 0.2,
"saturation": 0.2,
"hue": 0.05
},
"brightness_img": "front",
"brightness_threshold": 50.0,
"crop_mode": {},
"proprio_dim": 7,
"use_bbox": 1,
"pred": null
},
"model": {
"backbone_2d": {
"name": "dinosiglip",
"image_size": 224
},
"llm": {
"name": "internlm/internlm2-1_8b",
"max_len": 2048,
"special_tokens": [],
"pad_multiple_of": 64,
"attn_implementation": "flex_attention"
},
"ckpt": "latest-checkpoint.pt",
"pred": "cot_flow_matching",
"action_len": 4,
"action_dim": 7,
"proprio_dim": 7,
"action_expert": 1,
"action_expert_cfg": {
"hidden_size_scale": 2,
"intermediate_size_scale": 4,
"hidden_size": null,
"intermediate_size": null,
"hidden_act": null
},
"flow_matching_cfg": {
"beta_alpha": 1.5,
"beta_beta": 1.0,
"time_min": 0.001,
"time_max": 1.0
}
},
"dummy": null
}