rajammanabrolu commited on
Commit
ed2024b
·
1 Parent(s): aeff707

Update tiktoken.py

Browse files
Files changed (1) hide show
  1. tiktoken.py +8 -4
tiktoken.py CHANGED
@@ -173,11 +173,15 @@ class TiktokenTokenizerWrapper(PreTrainedTokenizer):
173
  Pinning default Chat ML template in case defaults change.
174
  """
175
  template = (
176
- "{% set system_message = '' %}"
177
- '{% if USE_DEFAULT_PROMPT == true %}'
178
- "{{'<|im_start|>system\n' + 'DEFAULT_SYSTEM_PROMPT'}}"
 
 
 
 
179
  '{% endif %}'
180
- '{% for message in messages %}'
181
  "{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}"
182
  '{% if (add_generation_prompt == true) %}'
183
  "{{'<|im_start|>' + 'assistant' + '\n'}}"
 
173
  Pinning default Chat ML template in case defaults change.
174
  """
175
  template = (
176
+ "{% if messages[0]['role'] == 'system' %}"
177
+ "{% set loop_messages = messages[1:] %}" # Extract system message if it's present
178
+ "{% set system_message = messages[0]['content'] %}"
179
+ "{% elif USE_DEFAULT_PROMPT == true and not 'system' in messages[0]['role']%}"
180
+ "{% set loop_messages = messages %}" # No system message present; keep all messages and fall back to the default system prompt
181
+ "{% set system_message = 'DEFAULT_SYSTEM_PROMPT' %}"
182
+ "{{'<|im_start|>system\n' + system_message}}"
183
  '{% endif %}'
184
+ '{% for message in loop_messages %}'
185
  "{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}"
186
  '{% if (add_generation_prompt == true) %}'
187
  "{{'<|im_start|>' + 'assistant' + '\n'}}"