Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -12,7 +12,7 @@ tokenizer = AutoTokenizer.from_pretrained(checkpoint)
|
|
| 12 |
client = InferenceClient(checkpoint)
|
| 13 |
|
| 14 |
|
| 15 |
-
def
|
| 16 |
# Adjusting the format to fit the currently selected model
|
| 17 |
# First everything is converted into role format
|
| 18 |
# First a system prompt
|
|
@@ -31,6 +31,15 @@ def format_prompt(message, history, systemPrompt):
|
|
| 31 |
systemPromptMessage = f"<s>[INST] <<SYS>>\n{systemPrompt}\n<</SYS>>\n\n "
|
| 32 |
forPrompt=systemPromptMessage+str(messages)
|
| 33 |
return forPrompt
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 34 |
|
| 35 |
|
| 36 |
def inference(message, history, systemPrompt=SYSTEM_PROMPT, temperature=0.9, maxTokens=512, topP=0.9, repPenalty=1.1):
|
|
|
|
| 12 |
client = InferenceClient(checkpoint)
|
| 13 |
|
| 14 |
|
| 15 |
+
def format_promptOld(message, history, systemPrompt):
|
| 16 |
# Adjusting the format to fit the currently selected model
|
| 17 |
# First everything is converted into role format
|
| 18 |
# First a system prompt
|
|
|
|
| 31 |
systemPromptMessage = f"<s>[INST] <<SYS>>\n{systemPrompt}\n<</SYS>>\n\n "
|
| 32 |
forPrompt=systemPromptMessage+str(messages)
|
| 33 |
return forPrompt
|
| 34 |
+
def format_prompt(message, chatbot, system_prompt):
    """Assemble a Llama-2 style ``[INST]`` prompt string.

    The optional system prompt is wrapped in a ``<<SYS>> ... <</SYS>>`` block,
    each past (user, assistant) pair from ``chatbot`` is replayed as a closed
    ``[INST] user [/INST] assistant </s><s>[INST]`` turn, and the new
    ``message`` is appended as the final open instruction.
    """
    # Seed the prompt: include the system block only when a system prompt was given.
    if system_prompt != "":
        header = f"<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n "
    else:
        header = f"<s>[INST] "

    # Replay the conversation history, one closed turn per interaction.
    # Index access (not unpacking) and explicit str() mirror how arbitrary
    # pair-like entries are coerced.
    turns = [
        str(pair[0]) + " [/INST] " + str(pair[1]) + " </s><s>[INST] "
        for pair in chatbot
    ]

    # Close with the new user message left open for the model to answer.
    return header + "".join(turns) + str(message) + " [/INST] "
|
| 43 |
|
| 44 |
|
| 45 |
def inference(message, history, systemPrompt=SYSTEM_PROMPT, temperature=0.9, maxTokens=512, topP=0.9, repPenalty=1.1):
|