{
"hidden_size": 4096,
"layer_nums": 32,
"attention_mask": "float",
"key_value_shape": [
2,
1,
0,
8,
128
],
"bos": "",
"system_prompt_template": "%s",
"user_prompt_template": "%s",
"assistant_prompt_template": "%s",
"is_visual": false,
"attention_type": "full",
"jinja": {
"chat_template": "{% set ns = namespace(has_head=true) %}{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = message['content'] %}{% if loop.index0 == 0 %}{% if content == '' %}{% set ns.has_head = false %}{% elif message['role'] == 'system' %}{% set content = '<|startoftext|>' + content + '<|extra_4|>' %}{% endif %}{% endif %}{% if message['role'] == 'user' %}{% if loop.index0 == 1 and ns.has_head %}{% set content = content + '<|extra_0|>' %}{% else %}{% set content = '<|startoftext|>' + content + '<|extra_0|>' %}{% endif %}{% elif message['role'] == 'assistant' %}{% set content = content + '<|eos|>' %}{% endif %}{{ content }}{% endfor %}",
"bos": "<|startoftext|>",
"eos": "<|eos|>"
},
"tie_embeddings": [
4364276810,
4889613386,
65667072,
8,
64
]
}