madroid committed on
Commit
f9b6d8c
·
verified ·
1 Parent(s): 83e9aa1

Support tool_use chat_template

Browse files

The v3 version already supports tools (function calling), and this capability is already supported in the [mistralai/Mixtral-8x22B-Instruct-v0.1](https://huggingface.co/mistralai/Mixtral-8x22B-Instruct-v0.1/blob/main/tokenizer_config.json) project, so this part should be reusable.

Once added, it can be used when using the Huggingface API:

```
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "mistralai/Mistral-7B-Instruct-v0.3"
tokenizer = AutoTokenizer.from_pretrained(model_id)
conversation=[
{"role": "user", "content": "What's the weather like in Paris?"},
{
"role": "tool_calls",
"content": [
{
"name": "get_current_weather",
"arguments": {"location": "Paris, France", "format": "celsius"},

}
]
},
{
"role": "tool_results",
"content": {"content": 22}
},
{"role": "assistant", "content": "The current temperature in Paris, France is 22 degrees Celsius."},
{"role": "user", "content": "What about San Francisco?"}
]


tools = [{"type": "function", "function": {"name":"get_current_weather", "description": "Get the current weather", "parameters": {"type": "object", "properties": {"location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"}, "format": {"type": "string", "enum": ["celsius", "fahrenheit"], "description": "The temperature unit to use. Infer this from the users location."}},"required":["location","format"]}}}]

# render the tool use prompt as a string:
tool_use_prompt = tokenizer.apply_chat_template(
conversation,
chat_template="tool_use",
tools=tools,
tokenize=False,
add_generation_prompt=True,

)
model = AutoModelForCausalLM.from_pretrained(model_id)

inputs = tokenizer(tool_use_prompt, return_tensors="pt")

outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

```

Files changed (1) hide show
  1. tokenizer_config.json +10 -1
tokenizer_config.json CHANGED
@@ -6173,7 +6173,16 @@
6173
  }
6174
  },
6175
  "bos_token": "<s>",
6176
- "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
 
 
 
 
 
 
 
 
 
6177
  "clean_up_tokenization_spaces": false,
6178
  "eos_token": "</s>",
6179
  "legacy": false,
 
6173
  }
6174
  },
6175
  "bos_token": "<s>",
6176
+ "chat_template": [
6177
+ {
6178
+ "name": "default",
6179
+ "template": "{{bos_token}}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ ' [INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ ' ' + message['content'] + ' ' + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}"
6180
+ },
6181
+ {
6182
+ "name": "tool_use",
6183
+ "template": "{{bos_token}}{% set user_messages = messages | selectattr('role', 'equalto', 'user') | list %}{% for message in messages %}{% if message['role'] == 'user' %}{% if message == user_messages[-1] %}{% if tools %}{{'[AVAILABLE_TOOLS]'+ tools|string + '[/AVAILABLE_TOOLS]'}}{% endif %}{{ '[INST]' + message['content'] + '[/INST]' }}{% else %}{{ '[INST]' + message['content'] + '[/INST]' }}{% endif %}{% elif message['role'] == 'assistant' %}{{ ' ' + message['content'] + ' ' + eos_token}}{% elif message['role'] == 'tool_results' %}{{'[TOOL_RESULTS]' + message['content']|string + '[/TOOL_RESULTS]'}}{% elif message['role'] == 'tool_calls' %}{{'[TOOL_CALLS]' + message['content']|string + eos_token}}{% endif %}{% endfor %}"
6184
+ }
6185
+ ],
6186
  "clean_up_tokenization_spaces": false,
6187
  "eos_token": "</s>",
6188
  "legacy": false,