Instructions to use INSAIT-Institute/arvla-bridge with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- Transformers
How to use INSAIT-Institute/arvla-bridge with Transformers:
# Load model directly
from transformers import AutoModel
model = AutoModel.from_pretrained("INSAIT-Institute/arvla-bridge", trust_remote_code=True, dtype="auto")
- Notebooks
- Google Colab
- Kaggle
File size: 2,184 Bytes
{
"processor_class": "VLARMHFProcessor",
"base_processor_model_id": "google/paligemma-3b-mix-224",
"vlarm_processor_config": {
"control_io_config": {
"future_controls_sequence_length": 5,
"future_controls_sequence_stride_sec": 0.2,
"future_frames_sequence_length": 1,
"future_frames_sequence_stride_sec": null,
"future_control_offset_sec": 0.0,
"past_scalars_sequence_length": 1,
"past_frames_sequence_length": 1,
"past_scalars_stride_sec": null,
"past_frames_stride_sec": null,
"sequence_frames": 1,
"sequence_frames_stride_sec": null
},
"obs_translation_norm": "bounds_q99",
"obs_rotation_norm": "none",
"translation_norm": {
"high": [
0.04,
0.04,
0.04
],
"low": [
-0.04,
-0.04,
-0.04
]
},
"rotation_norm": "none",
"joints_norm": {
"high": [
3.141592653589793,
3.141592653589793,
3.141592653589793,
3.141592653589793,
3.141592653589793,
3.141592653589793,
3.141592653589793
],
"low": [
-3.141592653589793,
-3.141592653589793,
-3.141592653589793,
-3.141592653589793,
-3.141592653589793,
-3.141592653589793,
-3.141592653589793
]
},
"rotation_format": "quaternion",
"eef_control_frame": false,
"delta_controls": true,
"image_resize": "naive",
"max_language_tokens": 75,
"control_tokenizer_config": {},
"control_stats_path": "/home/yutong/experiments/vlams/vlarm/sess_2025_11_14_11_29_01_msp3-3_yutong_hu_fast_vl+pt_ar.6/hf_export/hare-determined-crimson-transformers/stats/control_stats.yaml",
"observation_stats_path": "/home/yutong/experiments/vlams/vlarm/sess_2025_11_14_11_29_01_msp3-3_yutong_hu_fast_vl+pt_ar.6/hf_export/hare-determined-crimson-transformers/stats/observation_stats.yaml"
},
"vlm_processor_config": {
"image_token": "<image>",
"image_sizes": {
"main": {
"width": 224,
"height": 224
}
}
},
"auto_map": {
"AutoProcessor": "processing_vlarm_hf.VLARMHFProcessor"
}
}