{
"aggregation_type": "mean",
"auto_mapping": {
"base_model_class": "LlamaForCausalLM",
"parent_library": "prompt.model.modeling_llama_custom"
},
"base_model_name_or_path": "mtgv/MobileLLaMA-1.4B-Chat",
"inference_mode": true,
"num_attention_heads": 16,
"num_exits": 1,
"num_layers": 24,
"num_special_tokens": 3,
"num_transformer_submodules": 1,
"num_virtual_tokens": 3,
"prefix_virtual_tokens": 10,
"prompt_tuning_init": "TEXT",
"prompt_tuning_init_text": "Next 2 word",
"revision": null,
"task_type": null,
"token_dim": 2048,
"tokenizer_kwargs": null,
"tokenizer_name_or_path": "mtgv/MobileLLaMA-1.4B-Chat",
"use_cache": true,
"use_custom_lm_head": false,
"use_prefix_tuning": false,
"virtual_tokens_per_special_token": 1,
"vt_attention_type": "ensemble"
}
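
This is a PEFT-style prompt-tuning adapter configuration. Fields such as "num_exits", "prefix_virtual_tokens", "vt_attention_type", and "aggregation_type" are not part of stock PEFT's PromptTuningConfig, so they are presumably consumed by the custom module named in "auto_mapping" ("prompt.model.modeling_llama_custom"). A minimal sketch of inspecting the file, assuming it is saved locally as adapter_config.json:

import json

# Load the adapter configuration and inspect a few fields.
with open("adapter_config.json") as f:
    config = json.load(f)

# Base checkpoint the adapter was trained against.
print(config["base_model_name_or_path"])  # mtgv/MobileLLaMA-1.4B-Chat

# Prompt-tuning setup: 3 virtual tokens, initialized from the text
# "Next 2 word", each of dimension 2048 (the base model's hidden size).
print(config["num_virtual_tokens"], config["token_dim"])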