{
  "norm_stats": {
    "libero_10_no_noops": {
      "action": {
        "mean": [
          0.01820324920117855,
          0.05858374014496803,
          -0.05592384561896324,
          0.004626928828656673,
          0.00289608770981431,
          -0.007673131301999092,
          0.5457824468612671
        ],
        "std": [
          0.2825464606285095,
          0.35904666781425476,
          0.3673802614212036,
          0.03770702704787254,
          0.05429719388484955,
          0.08725254982709885,
          0.49815231561660767
        ],
        "max": [
          0.9375,
          0.9375,
          0.9375,
          0.30000001192092896,
          0.29357144236564636,
          0.375,
          1.0
        ],
        "min": [
          -0.9375,
          -0.9375,
          -0.9375,
          -0.23642857372760773,
          -0.3053571283817291,
          -0.3675000071525574,
          0.0
        ],
        "q01": [
          -0.6348214149475098,
          -0.7741071581840515,
          -0.7633928656578064,
          -0.09749999642372131,
          -0.14819999992847435,
          -0.2742857038974762,
          0.0
        ],
        "q99": [
          0.7714285850524902,
          0.8464285731315613,
          0.9375,
          0.13928571343421936,
          0.15964286029338837,
          0.3246428668498993,
          1.0
        ],
        "mask": [
          true,
          true,
          true,
          true,
          true,
          true,
          false
        ]
      },
      "proprio": {
        "mean": [
          -0.04190658777952194,
          0.03539430722594261,
          0.8257141709327698,
          2.908308267593384,
          -0.5562185049057007,
          -0.16649018228054047,
          0.028316624462604523,
          -0.028561657294631004
        ],
        "std": [
          0.10743364691734314,
          0.14424669742584229,
          0.2572328448295593,
          0.3441362977027893,
          1.234421730041504,
          0.3579835891723633,
          0.013308707624673843,
          0.013174631632864475
        ],
        "max": [
          0.21031762659549713,
          0.39128610491752625,
          1.3332009315490723,
          3.6714255809783936,
          3.560650587081909,
          1.386339545249939,
          0.04160946607589722,
          0.0013633022317662835
        ],
        "min": [
          -0.4828203022480011,
          -0.3255046010017395,
          0.445506751537323,
          1.1321442127227783,
          -3.641430377960205,
          -1.842738389968872,
          -0.0010040868073701859,
          -0.04111652821302414
        ],
        "q01": [
          -0.3899900782108307,
          -0.2838300323486328,
          0.44795057058334353,
          1.8810229921340942,
          -2.886677579879761,
          -1.1599004411697387,
          0.002066459748893976,
          -0.04001387819647789
        ],
        "q99": [
          0.1530261474847791,
          0.32915401458740223,
          1.2546923208236693,
          3.303542451858519,
          2.7496529006957933,
          0.6893712210655194,
          0.040048558115959164,
          -0.0017598449345678235
        ]
      },
      "num_transitions": 101469,
      "num_trajectories": 379
    }
  },
  "n_action_bins": 256,
  "vision_backbone_id": "dinosiglip-vit-so-224px",
  "llm_backbone_id": "llama2-7b-pure",
  "arch_specifier": "no-align+fused-gelu-mlp",
  "output_projector_states": false,
  "use_fused_vision_backbone": true,
  "timm_model_ids": [
    "vit_large_patch14_reg4_dinov2.lvd142m",
    "vit_so400m_patch14_siglip_224"
  ],
  "timm_override_act_layers": [
    null,
    null
  ],
  "image_sizes": [
    224,
    224
  ],
  "image_resize_strategy": "resize-naive",
  "hf_llm_id": "meta-llama/Llama-2-7b-hf",
  "llm_max_length": 2048,
  "pad_token_id": 32000,
  "pad_to_multiple_of": 64,
  "text_config": {
    "vocab_size": 32064,
    "max_position_embeddings": 2048,
    "hidden_size": 4096,
    "intermediate_size": 11008,
    "num_hidden_layers": 32,
    "num_attention_heads": 32,
    "num_key_value_heads": 32,
    "hidden_act": "silu",
    "initializer_range": 0.02,
    "rms_norm_eps": 1e-06,
    "pretraining_tp": 1,
    "use_cache": true,
    "rope_theta": 10000.0,
    "rope_scaling": null,
    "attention_bias": false,
    "attention_dropout": 0.0,
    "return_dict": true,
    "output_hidden_states": false,
    "output_attentions": false,
    "torchscript": false,
    "torch_dtype": "bfloat16",
    "use_bfloat16": false,
    "tf_legacy_loss": false,
    "pruned_heads": {},
    "tie_word_embeddings": false,
    "chunk_size_feed_forward": 0,
    "is_encoder_decoder": false,
    "is_decoder": false,
    "cross_attention_hidden_size": null,
    "add_cross_attention": false,
    "tie_encoder_decoder": false,
    "max_length": 20,
    "min_length": 0,
    "do_sample": false,
    "early_stopping": false,
    "num_beams": 1,
    "num_beam_groups": 1,
    "diversity_penalty": 0.0,
    "temperature": 1.0,
    "top_k": 50,
    "top_p": 1.0,
    "typical_p": 1.0,
    "repetition_penalty": 1.0,
    "length_penalty": 1.0,
    "no_repeat_ngram_size": 0,
    "encoder_no_repeat_ngram_size": 0,
    "bad_words_ids": null,
    "num_return_sequences": 1,
    "output_scores": false,
    "return_dict_in_generate": false,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "remove_invalid_values": false,
    "exponential_decay_length_penalty": null,
    "suppress_tokens": null,
    "begin_suppress_tokens": null,
    "architectures": null,
    "finetuning_task": null,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "tokenizer_class": null,
    "prefix": null,
    "bos_token_id": 1,
    "pad_token_id": 32000,
    "eos_token_id": 2,
    "sep_token_id": null,
    "decoder_start_token_id": null,
    "task_specific_params": null,
    "problem_type": null,
    "_name_or_path": "",
    "model_type": "llama"
  },
  "return_dict": true,
  "output_hidden_states": false,
  "output_attentions": false,
  "torchscript": false,
  "torch_dtype": "bfloat16",
  "use_bfloat16": false,
  "tf_legacy_loss": false,
  "pruned_heads": {},
  "tie_word_embeddings": true,
  "chunk_size_feed_forward": 0,
  "is_encoder_decoder": false,
  "is_decoder": false,
  "cross_attention_hidden_size": null,
  "add_cross_attention": false,
  "tie_encoder_decoder": false,
  "max_length": 20,
  "min_length": 0,
  "do_sample": false,
  "early_stopping": false,
  "num_beams": 1,
  "num_beam_groups": 1,
  "diversity_penalty": 0.0,
  "temperature": 1.0,
  "top_k": 50,
  "top_p": 1.0,
  "typical_p": 1.0,
  "repetition_penalty": 1.0,
  "length_penalty": 1.0,
  "no_repeat_ngram_size": 0,
  "encoder_no_repeat_ngram_size": 0,
  "bad_words_ids": null,
  "num_return_sequences": 1,
  "output_scores": false,
  "return_dict_in_generate": false,
  "forced_bos_token_id": null,
  "forced_eos_token_id": null,
  "remove_invalid_values": false,
  "exponential_decay_length_penalty": null,
  "suppress_tokens": null,
  "begin_suppress_tokens": null,
  "architectures": [
    "OpenVLAForActionPrediction"
  ],
  "finetuning_task": null,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1"
  },
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1
  },
  "tokenizer_class": null,
  "prefix": null,
  "bos_token_id": null,
  "eos_token_id": null,
  "sep_token_id": null,
  "decoder_start_token_id": null,
  "task_specific_params": null,
  "problem_type": null,
  "_name_or_path": "/home/user1/.cache/huggingface/hub/models--openvla--openvla-7b/snapshots/31f090d05236101ebfc381b61c674dd4746d4ce0",
  "transformers_version": "4.40.1",
  "auto_map": {
    "AutoConfig": "configuration_prismatic.OpenVLAConfig",
    "AutoModelForVision2Seq": "modeling_prismatic.OpenVLAForActionPrediction"
  },
  "model_type": "openvla"
}