{
"adapter_config": {
"attention_dropout": 0.0,
"float32_attention": true,
"head_dim": 72,
"hidden_act": "silu",
"hidden_size": 1152,
"image_feature_dropout": 0.0,
"image_padding_embed": null,
"initializer_range": 0.02,
"intermediate_size": 18944,
"model_type": "",
"num_attention_heads": 16,
"num_key_value_heads": 16,
"residual_dropout": 0.0,
"text_hidden_size": 3584,
"vit_layers": [
-3,
-9
]
},
"architectures": [
"SPRVLAForActionReasoning"
],
"auto_map": {
"AutoConfig": "configuration_sprvla.SPRVLAConfig",
"AutoModelForImageTextToText": "modeling_sprvla.SPRVLAForActionReasoning"
},
"image_patch_id": 152066,
"initializer_range": 0.02,
"llm_config": {
"additional_vocab_size": 128,
"attention_dropout": 0.0,
"embedding_dropout": 0.0,
"head_dim": 128,
"hidden_act": "silu",
"hidden_size": 3584,
"initializer_range": 0.02,
"intermediate_size": 18944,
"layer_norm_eps": 1e-06,
"max_position_embeddings": 4096,
"model_type": "sprvla_llm",
"norm_after": false,
"num_attention_heads": 28,
"num_hidden_layers": 28,
"num_key_value_heads": 4,
"qk_norm_type": "olmo",
"qkv_bias": true,
"residual_dropout": 0.0,
"rope_scaling": null,
"rope_theta": 1000000.0,
"use_cache": true,
"use_qk_norm": false,
"vocab_size": 152064
},
"model_type": "sprvla",
"n_action_bins": 256,
"norm_stats": {
"sprvla": {
"action": {
"max": [
0.06042003631591797,
0.09417290985584259,
0.07019275426864624,
0.2616892158985138,
0.11751057207584381,
0.16968433558940887,
1.0
],
"mean": [
0.0005706787342205644,
0.0002448957529850304,
-3.5987635783385485e-05,
0.00021597897284664214,
-0.0004896928439848125,
-0.000241481073317118,
0.5570635199546814
],
"min": [
-0.07434078305959702,
-0.07339745759963989,
-0.06539416313171387,
-0.1688285619020462,
-0.10289879888296127,
-0.2667275667190552,
0.0
],
"q01": [
-0.01538565568625927,
-0.021047022193670273,
-0.01688069850206375,
-0.044314172118902206,
-0.03890235349535942,
-0.04788423702120781,
0.0
],
"q99": [
0.014661382883787155,
0.026515591889619827,
0.021398313343524933,
0.04216696694493294,
0.03401297703385353,
0.04957397282123566,
1.0
],
"std": [
0.005207270849496126,
0.007506529800593853,
0.006415561307221651,
0.013248044066131115,
0.010928540490567684,
0.014873150736093521,
0.49715080857276917
]
},
"num_entries": 1560068
},
"libero_object_no_noops_modified": {
"action": {
"max": [
0.9375,
0.8919642567634583,
0.9375,
0.17678570747375488,
0.35035714507102966,
0.1810714304447174,
1.0
],
"mean": [
0.07096529006958008,
0.13498851656913757,
-0.04601382836699486,
0.00123520044144243,
0.006998839322477579,
-0.015027612447738647,
0.46428999304771423
],
"min": [
-0.8839285969734192,
-0.9375,
-0.9375,
-0.15000000596046448,
-0.29035714268684387,
-0.32892856001853943,
0.0
],
"q01": [
-0.5383928418159485,
-0.8758928775787354,
-0.9375,
-0.06964285671710968,
-0.11678571254014969,
-0.15964286029338837,
0.0
],
"q99": [
0.8464285731315613,
0.84375,
0.9375,
0.08142857253551483,
0.14892856776714325,
0.0867857113480568,
1.0
],
"std": [
0.2681235373020172,
0.43846824765205383,
0.4474974274635315,
0.024446550756692886,
0.049355510622262955,
0.042107198387384415,
0.49879148602485657
]
},
"num_trajectories": 454,
"num_transitions": 66984,
"proprio": {
"max": [
0.14580604434013367,
0.33216384053230286,
0.3857804834842682,
3.4003844261169434,
0.7954911589622498,
0.6642207503318787,
0.0,
0.04104341194033623,
-0.00018117300351150334
],
"mean": [
-0.02999030612409115,
-0.007947085425257683,
0.20293472707271576,
3.1086409091949463,
-0.21404768526554108,
-0.11307074874639511,
0.0,
0.029380427673459053,
-0.030556727200746536
],
"min": [
-0.1765444278717041,
-0.29457300901412964,
0.008128180168569088,
2.2890501022338867,
-1.883241891860962,
-1.0600427389144897,
0.0,
0.0006495157140307128,
-0.041782498359680176
],
"q01": [
-0.14911890715360643,
-0.25978428691625594,
0.009925739830359817,
2.7545341420173646,
-1.3996034812927245,
-0.6867720144987106,
0.0,
0.008197814421728254,
-0.04015838988125324
],
"q99": [
0.09063626825809479,
0.29066365867853167,
0.3370887073874472,
3.2611824750900267,
0.32092821151018125,
0.4037663781642913,
0.0,
0.039891827926039694,
-0.009106044843792932
],
"std": [
0.06694897264242172,
0.17608462274074554,
0.07807064801454544,
0.0868484303355217,
0.33540457487106323,
0.20728276669979095,
0.0,
0.00956575945019722,
0.009197483770549297
]
}
}
},
"tie_word_embeddings": false,
"torch_dtype": "bfloat16",
"transformers_version": "4.52.1",
"use_cache": true,
"vit_config": {
"attention_dropout": 0.0,
"float32_attention": true,
"head_dim": 72,
"hidden_act": "gelu_pytorch_tanh",
"hidden_size": 1152,
"image_default_input_size": [
378,
378
],
"image_num_pos": 729,
"image_patch_size": 14,
"initializer_range": 0.02,
"intermediate_size": 4304,
"layer_norm_eps": 1e-06,
"model_type": "sprvla_vit",
"num_attention_heads": 16,
"num_hidden_layers": 27,
"num_key_value_heads": 16,
"patch_bias": true,
"pre_layernorm": false,
"residual_dropout": 0.0,
"use_cls_token": false
}
}