Spaces:
Runtime error
Runtime error
0.41 system prompt tweaks.
Browse files
app.py
CHANGED
|
@@ -19,10 +19,10 @@ login(token=HF_TOKEN)
|
|
| 19 |
|
| 20 |
|
| 21 |
system_prompts = {
|
| 22 |
-
"English": "You are a helpful chatbot that
|
| 23 |
-
"German": "Du bist ein hilfreicher Chatbot, der
|
| 24 |
-
"French": "
|
| 25 |
-
"Spanish": "Eres un chatbot
|
| 26 |
}
|
| 27 |
|
| 28 |
model_info = [{"id": "NousResearch/Meta-Llama-3.1-8B-Instruct",
|
|
@@ -56,9 +56,8 @@ except Exception as e:
|
|
| 56 |
|
| 57 |
|
| 58 |
def apply_chat_template(messages, add_generation_prompt=False):
|
| 59 |
-
"""
|
| 60 |
-
|
| 61 |
-
messages: List of dictionaries, each containing a 'role' and 'content'.
|
| 62 |
"""
|
| 63 |
pharia_template = """<|begin_of_text|>"""
|
| 64 |
role_map = {
|
|
@@ -67,13 +66,11 @@ def apply_chat_template(messages, add_generation_prompt=False):
|
|
| 67 |
"assistant": "<|start_header_id|>assistant<|end_header_id|>\n",
|
| 68 |
}
|
| 69 |
|
| 70 |
-
# Iterate through the messages and apply the template for each role
|
| 71 |
for message in messages:
|
| 72 |
role = message["role"]
|
| 73 |
content = message["content"]
|
| 74 |
pharia_template += role_map.get(role, "") + content + "<|eot_id|>\n"
|
| 75 |
|
| 76 |
-
# Add the assistant generation prompt if required
|
| 77 |
if add_generation_prompt:
|
| 78 |
pharia_template += "<|start_header_id|>assistant<|end_header_id|>\n"
|
| 79 |
|
|
@@ -209,18 +206,20 @@ with gr.Blocks() as demo:
|
|
| 209 |
with gr.Column():
|
| 210 |
gr.HTML("<center><h1>🤖le Royale</h1></center>")
|
| 211 |
gr.Markdown(arena_notes)
|
| 212 |
-
|
| 213 |
-
|
| 214 |
-
|
| 215 |
-
|
| 216 |
-
|
| 217 |
-
|
| 218 |
-
|
| 219 |
-
|
| 220 |
-
|
| 221 |
-
|
| 222 |
-
|
| 223 |
-
|
|
|
|
|
|
|
| 224 |
with gr.Row(variant="panel"):
|
| 225 |
with gr.Column(scale=1):
|
| 226 |
submit_btn = gr.Button(value="Generate", variant="primary")
|
|
|
|
| 19 |
|
| 20 |
|
| 21 |
system_prompts = {
|
| 22 |
+
"English": "You are a helpful chatbot that answers user input in a concise and witty way.",
|
| 23 |
+
"German": "Du bist ein hilfreicher Chatbot, der Usereingaben knapp und originell beantwortet.",
|
| 24 |
+
"French": "Tu es un chatbot utile qui répond aux questions des utilisateurs de manière concise et originale.",
|
| 25 |
+
"Spanish": "Eres un chatbot servicial que responde a las entradas de los usuarios de forma concisa y original."
|
| 26 |
}
|
| 27 |
|
| 28 |
model_info = [{"id": "NousResearch/Meta-Llama-3.1-8B-Instruct",
|
|
|
|
| 56 |
|
| 57 |
|
| 58 |
def apply_chat_template(messages, add_generation_prompt=False):
|
| 59 |
+
"""Adds chat template for Pharia. Expects a list of messages.
|
| 60 |
+
add_generation_prompt: bool extends template for generation.
|
|
|
|
| 61 |
"""
|
| 62 |
pharia_template = """<|begin_of_text|>"""
|
| 63 |
role_map = {
|
|
|
|
| 66 |
"assistant": "<|start_header_id|>assistant<|end_header_id|>\n",
|
| 67 |
}
|
| 68 |
|
|
|
|
| 69 |
for message in messages:
|
| 70 |
role = message["role"]
|
| 71 |
content = message["content"]
|
| 72 |
pharia_template += role_map.get(role, "") + content + "<|eot_id|>\n"
|
| 73 |
|
|
|
|
| 74 |
if add_generation_prompt:
|
| 75 |
pharia_template += "<|start_header_id|>assistant<|end_header_id|>\n"
|
| 76 |
|
|
|
|
| 206 |
with gr.Column():
|
| 207 |
gr.HTML("<center><h1>🤖le Royale</h1></center>")
|
| 208 |
gr.Markdown(arena_notes)
|
| 209 |
+
with gr.Row(variant="panel"):
|
| 210 |
+
with gr.Column(scale=1):
|
| 211 |
+
language_dropdown = gr.Dropdown(
|
| 212 |
+
choices=["English", "German", "French", "Spanish"],
|
| 213 |
+
label="Select Language for System Prompt",
|
| 214 |
+
value="English"
|
| 215 |
+
)
|
| 216 |
+
with gr.Column():
|
| 217 |
+
system_prompt = gr.Textbox(
|
| 218 |
+
lines=1,
|
| 219 |
+
label="System Prompt",
|
| 220 |
+
value=system_prompts["English"],
|
| 221 |
+
show_copy_button=True
|
| 222 |
+
)
|
| 223 |
with gr.Row(variant="panel"):
|
| 224 |
with gr.Column(scale=1):
|
| 225 |
submit_btn = gr.Button(value="Generate", variant="primary")
|