Ctaake commited on
Commit
bea8ee4
·
verified ·
1 Parent(s): 7447c29

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -6
app.py CHANGED
@@ -46,13 +46,15 @@ def format_prompt_gemma(message,chatbot,sytem_prompt=SYSTEM_PROMPT+SYSTEM_PROMPT
46
  return newPrompt
47
 
48
  def format_prompt_nous(message,chatbot,sytem_prompt=SYSTEM_PROMPT+SYSTEM_PROMPT_PLUS):
49
- fullPrompt =f"<|im_start|>system/n{sytem_prompt}<|im_end|>/n"
50
  for user_message, bot_message in chatbot:
51
- fullPrompt +=f"<|im_start|>user/n{user_message}<|im_end|>/n"
52
- fullPrompt +=f"<|im_start|>assistant/n{bot_message}<|im_end|>/n"
53
- fullPrompt +=f"<|im_start|>user/n{message}<|im_end|>/n"
54
- #fullPrompt +=f"<|im_start|>assistant"
55
- return fullPrompt
 
 
56
 
57
  match checkpoint:
58
  case "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO":
 
46
  return newPrompt
47
 
48
def format_prompt_nous(message, chatbot, sytem_prompt=SYSTEM_PROMPT + SYSTEM_PROMPT_PLUS):
    """Build a Nous-Hermes chat prompt string from the conversation history.

    Converts the Gradio-style history plus the new user message into a
    role/content message list and renders it with the model tokenizer's
    chat template (ChatML for Nous-Hermes-2).

    Args:
        message: The new user message to append last.
        chatbot: Iterable of (user_message, bot_message) pairs — prior turns.
        sytem_prompt: System prompt text (parameter name keeps the original
            spelling for backward compatibility with keyword callers).

    Returns:
        The rendered prompt as a plain string (tokenize=False).
    """
    # BUG FIX: the body previously referenced `system_prompt`, which is not
    # defined anywhere — the parameter is spelled `sytem_prompt` — so every
    # call raised NameError. Use the actual parameter.
    messages = [{"role": "system", "content": sytem_prompt}]
    for user_message, bot_message in chatbot:
        messages.append({"role": "user", "content": user_message})
        messages.append({"role": "assistant", "content": bot_message})
    messages.append({"role": "user", "content": message})
    # tokenize=False yields a string; `return_tensors` is ignored in that
    # mode, so the misleading return_tensors="pt" argument was dropped.
    newPrompt = tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True
    )
    print(newPrompt)  # NOTE(review): debug output — consider removing/logging
    return newPrompt
58
 
59
  match checkpoint:
60
  case "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO":