Update tokenizer_config.json
Browse files
tokenizer_config.json +2 -2
tokenizer_config.json
CHANGED
|
@@ -91,7 +91,7 @@
|
|
| 91 |
"special": true
|
| 92 |
},
|
| 93 |
"32008": {
|
| 94 |
-
"content": "<|
|
| 95 |
"lstrip": false,
|
| 96 |
"normalized": false,
|
| 97 |
"rstrip": true,
|
|
@@ -123,7 +123,7 @@
|
|
| 123 |
},
|
| 124 |
{
|
| 125 |
"name": "tool_use",
|
| 126 |
-
"template": "{{ bos_token }}{% set user_messages = messages | selectattr('role', 'equalto', 'user') | list %}{% for message in messages %}{% if (message['role'] == 'system') %}{{'<|system|>' + '\n' + message['content'] + '<|end|>' + '\n'}}{% elif (message['role'] == 'user') %}{% if tools is defined and tools != None and message == user_messages[-1] %}{{'<|function_metadata|>' + '\n'
|
| 127 |
}
|
| 128 |
],
|
| 129 |
"clean_up_tokenization_spaces": false,
|
|
|
|
| 91 |
"special": true
|
| 92 |
},
|
| 93 |
"32008": {
|
| 94 |
+
"content": "<|function_metadata|>",
|
| 95 |
"lstrip": false,
|
| 96 |
"normalized": false,
|
| 97 |
"rstrip": true,
|
|
|
|
| 123 |
},
|
| 124 |
{
|
| 125 |
"name": "tool_use",
|
| 126 |
+
"template": "{{ bos_token }}{% set user_messages = messages | selectattr('role', 'equalto', 'user') | list %}{% for message in messages %}{% if (message['role'] == 'system') %}{{'<|system|>' + '\n' + message['content'] + '<|end|>' + '\n'}}{% elif (message['role'] == 'user') %}{% if tools is defined and tools != None and message == user_messages[-1] %}{{'<|function_metadata|>' + '\n'}}{{ tools }}{{'\n'}}{% endif %}{{'<|user|>' + '\n' + message['content'] + '<|end|>' + '\n' + '<|assistant|>' + '\n'}}{% elif (message['role'] == 'assistant') %}{% if message['tool_calls'] == None or message['tool_calls'] == '' %}{{message['content'] + '<|end|>' + '\n'}}{% else %}{{message['content'] + '<|tool_calls|>' + message['tool_calls'] + '<|end|>' + '\n'}}{% endif %}{% elif (message['role'] == 'tool_results') %}{{'<|tool_results|>' + '\n' + message['content'] + '<|end|>' + '\n'}}{% endif %}{% endfor %}"
|
| 127 |
}
|
| 128 |
],
|
| 129 |
"clean_up_tokenization_spaces": false,
|