{
  "adapter_config": {
    "attention_dropout": 0.0,
    "float32_attention": true,
    "head_dim": 72,
    "hidden_act": "silu",
    "hidden_size": 1152,
    "image_feature_dropout": 0.0,
    "image_padding_embed": null,
    "initializer_range": 0.02,
    "intermediate_size": 18944,
    "model_type": "",
    "num_attention_heads": 16,
    "num_key_value_heads": 16,
    "residual_dropout": 0.0,
    "text_hidden_size": 3584,
    "vit_layers": [
      -3,
      -9
    ]
  },
  "architectures": [
    "SPRVLAForActionReasoning"
  ],
  "auto_map": {
    "AutoConfig": "configuration_sprvla.SPRVLAConfig",
    "AutoModelForImageTextToText": "modeling_sprvla.SPRVLAForActionReasoning"
  },
  "image_patch_id": 152066,
  "initializer_range": 0.02,
  "llm_config": {
    "additional_vocab_size": 128,
    "attention_dropout": 0.0,
    "embedding_dropout": 0.0,
    "head_dim": 128,
    "hidden_act": "silu",
    "hidden_size": 3584,
    "initializer_range": 0.02,
    "intermediate_size": 18944,
    "layer_norm_eps": 1e-06,
    "max_position_embeddings": 4096,
    "model_type": "sprvla_llm",
    "norm_after": false,
    "num_attention_heads": 28,
    "num_hidden_layers": 28,
    "num_key_value_heads": 4,
    "qk_norm_type": "olmo",
    "qkv_bias": true,
    "residual_dropout": 0.0,
    "rope_scaling": null,
    "rope_theta": 1000000.0,
    "use_cache": true,
    "use_qk_norm": false,
    "vocab_size": 152064
  },
  "model_type": "sprvla",
  "n_action_bins": 256,
  "norm_stats": {
    "sprvla": {
      "action": {
        "max": [
          0.06042003631591797,
          0.09417290985584259,
          0.07019275426864624,
          0.2616892158985138,
          0.11751057207584381,
          0.16968433558940887,
          1.0
        ],
        "mean": [
          0.0005706787342205644,
          0.0002448957529850304,
          -3.5987635783385485e-05,
          0.00021597897284664214,
          -0.0004896928439848125,
          -0.000241481073317118,
          0.5570635199546814
        ],
        "min": [
          -0.07434078305959702,
          -0.07339745759963989,
          -0.06539416313171387,
          -0.1688285619020462,
          -0.10289879888296127,
          -0.2667275667190552,
          0.0
        ],
        "q01": [
          -0.01538565568625927,
          -0.021047022193670273,
          -0.01688069850206375,
          -0.044314172118902206,
          -0.03890235349535942,
          -0.04788423702120781,
          0.0
        ],
        "q99": [
          0.014661382883787155,
          0.026515591889619827,
          0.021398313343524933,
          0.04216696694493294,
          0.03401297703385353,
          0.04957397282123566,
          1.0
        ],
        "std": [
          0.005207270849496126,
          0.007506529800593853,
          0.006415561307221651,
          0.013248044066131115,
          0.010928540490567684,
          0.014873150736093521,
          0.49715080857276917
        ]
      },
      "num_entries": 1560068
    },
    "libero_spatial_no_noops_modified": {
      "action": {
        "max": [
          0.9375,
          0.9375,
          0.9375,
          0.1971428543329239,
          0.33642858266830444,
          0.375,
          1.0
        ],
        "mean": [
          0.15312479436397552,
          0.13707277178764343,
          -0.15526802837848663,
          -0.005176450591534376,
          -0.01120874285697937,
          -0.020194264128804207,
          0.4578818082809448
        ],
        "min": [
          -0.9375,
          -0.9375,
          -0.9375,
          -0.1875,
          -0.3675000071525574,
          -0.36000001430511475,
          0.0
        ],
        "q01": [
          -0.7454732114076613,
          -0.6616071462631226,
          -0.9375,
          -0.1071428582072258,
          -0.20678570866584778,
          -0.1842857152223587,
          0.0
        ],
        "q99": [
          0.9375,
          0.8758928775787354,
          0.9321428537368774,
          0.1039285734295845,
          0.17678570747375488,
          0.14571428298950195,
          1.0
        ],
        "std": [
          0.41272708773612976,
          0.34724321961402893,
          0.50869220495224,
          0.037266165018081665,
          0.07244449853897095,
          0.05762382969260216,
          0.49827873706817627
        ]
      },
      "num_trajectories": 432,
      "num_transitions": 52970,
      "proprio": {
        "max": [
          0.1759040206670761,
          0.3904820382595062,
          1.3290715217590332,
          3.4566118717193604,
          1.2268599271774292,
          1.0429412126541138,
          0.0,
          0.041053611785173416,
          0.000775813648942858
        ],
        "mean": [
          -0.024462558329105377,
          0.106529600918293,
          1.0580483675003052,
          3.0628468990325928,
          -0.10464039444923401,
          0.08307311683893204,
          0.0,
          0.01995457336306572,
          -0.020162804052233696
        ],
        "min": [
          -0.3095473051071167,
          -0.29250794649124146,
          0.9095591306686401,
          2.497488260269165,
          -1.8006486892700195,
          -0.7207611203193665,
          0.0,
          -0.0004703797458205372,
          -0.041536275297403336
        ],
        "q01": [
          -0.2727657300233841,
          -0.23721413239836692,
          0.9160063165426254,
          2.77949666261673,
          -1.3187511622905732,
          -0.41989982962608335,
          0.0,
          0.001503719249740243,
          -0.03989770736545324
        ],
        "q99": [
          0.13529365032911292,
          0.3629165390133857,
          1.2862326657772063,
          3.2829698753356933,
          0.9332760351896285,
          0.6325724506378171,
          0.0,
          0.039933966137468815,
          -0.001671919699292631
        ],
        "std": [
          0.1101478561758995,
          0.13784688711166382,
          0.1044282391667366,
          0.10451053828001022,
          0.4112098217010498,
          0.2176690548658371,
          0.0,
          0.017260896041989326,
          0.0171116404235363
        ]
      }
    }
  },
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.52.1",
  "use_cache": true,
  "vit_config": {
    "attention_dropout": 0.0,
    "float32_attention": true,
    "head_dim": 72,
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 1152,
    "image_default_input_size": [
      378,
      378
    ],
    "image_num_pos": 729,
    "image_patch_size": 14,
    "initializer_range": 0.02,
    "intermediate_size": 4304,
    "layer_norm_eps": 1e-06,
    "model_type": "sprvla_vit",
    "num_attention_heads": 16,
    "num_hidden_layers": 27,
    "num_key_value_heads": 16,
    "patch_bias": true,
    "pre_layernorm": false,
    "residual_dropout": 0.0,
    "use_cls_token": false
  }
}