{
  "arch_specifier": "no-align+fused-gelu-mlp",
  "architectures": [
    "OpenVLAForActionPrediction"
  ],
  "hf_llm_id": "Qwen/Qwen2.5-0.5B",
  "image_resize_strategy": "resize-naive",
  "image_sizes": [
    224,
    224
  ],
  "llm_backbone_id": "qwen25-0_5b-extra",
  "llm_max_length": 2048,
  "model_type": "openvla",
  "n_action_bins": 256,
  "norm_stats": {
    "bridge_orig": {
      "action": {
        "mask": [
          true,
          true,
          true,
          true,
          true,
          true,
          false
        ],
        "max": [
          0.41691166162490845,
          0.25864794850349426,
          0.21218234300613403,
          3.122201919555664,
          1.8618112802505493,
          6.280478477478027,
          1.0
        ],
        "mean": [
          0.0002334208256797865,
          0.00013004716311115772,
          -0.00012762544793076813,
          -0.00015565627836622298,
          -0.00040393657400272787,
          0.00023557731765322387,
          0.5764579772949219
        ],
        "min": [
          -0.4007510244846344,
          -0.13874775171279907,
          -0.22553899884223938,
          -3.2010786533355713,
          -1.8618112802505493,
          -6.279075622558594,
          0.0
        ],
        "q01": [
          -0.02872725307941437,
          -0.04170349963009357,
          -0.026093858778476715,
          -0.08092105075716972,
          -0.09288699507713317,
          -0.20718276381492615,
          0.0
        ],
        "q99": [
          0.028309678435325586,
          0.040855254605412394,
          0.040161586627364146,
          0.08192047759890528,
          0.07792850524187081,
          0.20382574498653397,
          1.0
        ],
        "std": [
          0.009765971451997757,
          0.013689194805920124,
          0.012667413800954819,
          0.028534049168229103,
          0.03063810057938099,
          0.07691364735364914,
          0.49737057089805603
        ]
      },
      "num_trajectories": 60064,
      "num_transitions": 2135463,
      "proprio": {
        "max": [
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0
        ],
        "mean": [
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0
        ],
        "min": [
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0
        ],
        "q01": [
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0
        ],
        "q99": [
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0
        ],
        "std": [
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0,
          0.0
        ]
      }
    }
  },
  "output_projector_states": false,
  "pad_to_multiple_of": 64,
  "pad_token_id": 151643,
  "text_config": {
    "_name_or_path": "Qwen/Qwen2.5-0.5B",
    "architectures": [
      "Qwen2ForCausalLM"
    ],
    "bos_token_id": 151643,
    "eos_token_id": 151643,
    "hidden_size": 896,
    "intermediate_size": 4864,
    "max_position_embeddings": 32768,
    "max_window_layers": 24,
    "model_type": "qwen2",
    "num_attention_heads": 14,
    "num_hidden_layers": 24,
    "num_key_value_heads": 2,
    "pad_token_id": null,
    "rms_norm_eps": 1e-06,
    "rope_theta": 1000000.0,
    "sliding_window": 32768,
    "tie_word_embeddings": true,
    "torch_dtype": "bfloat16",
    "use_mrope": false,
    "use_sliding_window": false,
    "vocab_size": 151936
  },
  "timm_model_ids": [
    "vit_large_patch14_reg4_dinov2.lvd142m",
    "vit_so400m_patch14_siglip_224"
  ],
  "timm_override_act_layers": [
    null,
    null
  ],
  "torch_dtype": "bfloat16",
  "transformers_version": "4.40.1",
  "use_fused_vision_backbone": true,
  "vision_backbone_id": "dinosiglip-vit-so-224px"
}
|
|