{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<image>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [
    "<image>"
  ],
  "bos_token": "<|im_start|>",
  "chat_template": "{% if messages[0]['role'] == 'system' %}{{ bos_token + 'system\n' + messages[0]['content'] + eos_token}}{% set messages = messages[1:] %}{% endif %}{% for i in range(messages | length) %}{% set message = messages[i] %}{% if message['role'] == 'user' %}{% if add_stream_query_prompt %}{{ eos_token + '\n' + bos_token + 'user\n' + message['content'] + eos_token }}{% else %}{{ '\n' + bos_token + 'user\n' + message['content'] + eos_token }}{% endif %}{% elif message['role'] == 'assistant' %}{{ '\n' + bos_token + 'assistant\n' + message['content'] + eos_token }}{% elif message['role'] == 'stream' and message['num_frames'] > 0 %}{{ '\n' + bos_token + 'stream\n' + ''.join([49 * '<image>'] * message['num_frames']) + eos_token }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '\n' + bos_token + 'assistant\n' }}{% elif add_stream_prompt %}{{ '\n' + bos_token + 'stream\n' }}{% elif add_stream_generation_prompt %}{{ eos_token + '\n' + bos_token + 'assistant\n' }}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "errors": "replace",
  "model_max_length": 32768,
  "pad_token": "<|endoftext|>",
  "padding_side": "left",
  "processor_class": "LlavaProcessor",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}