Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -11,16 +11,21 @@ client = InferenceClient("google/gemma-3-27b-it")
|
|
| 11 |
#return value
|
| 12 |
|
| 13 |
def respond(message, history):
|
| 14 |
-
messages = [{"role": "system", "content": "be a single mom from boston who divorced her 3rd husband yesterday"}]
|
| 15 |
#change content value to change personality
|
| 16 |
if history:
|
| 17 |
messages.extend(history)
|
| 18 |
#adds multiple value + types rather than just one
|
| 19 |
messages.append({"role": "user", "content": message})
|
| 20 |
-
response =
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 21 |
#change max tokens to a greater number
|
| 22 |
-
|
| 23 |
-
return response['choices'][0]['message']['content'].strip()
|
| 24 |
|
| 25 |
chatbot = gr.ChatInterface(respond, type = "messages")
|
| 26 |
|
|
|
|
| 11 |
#return value
|
| 12 |
|
| 13 |
def respond(message, history):
    """Stream a chat reply for *message*, yielding the growing partial response.

    Gradio's ChatInterface (type="messages") calls this with the user's text
    and the prior conversation; *history* is a list of {"role", "content"}
    dicts, which is why it can be spliced straight into *messages* below.

    Yields:
        The accumulated assistant reply after each streamed token, so the UI
        updates incrementally.
    """
    # System prompt controls the bot's personality — change "content" to
    # change it.
    messages = [{"role": "system", "content": "be a happy and sassy single mom from boston who divorced her 3rd husband yesterday"}]
    if history:
        # Adds the prior turns (multiple values + roles) rather than just one.
        messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = ""
    # NOTE: max_tokens=100 truncates long replies — raise it for fuller
    # answers. The loop variable is `chunk`, NOT `messages`: the original
    # reused `messages` here, clobbering the request list it had just built.
    for chunk in client.chat_completion(messages, max_tokens=100, stream=True):
        token = chunk.choices[0].delta.content
        # The final streamed chunk may carry delta.content == None; guard it
        # so we don't crash on `response += None`.
        if token:
            response += token
        yield response
|
| 29 |
|
| 30 |
# Wire the streaming respond() generator into Gradio's chat UI.
# type="messages" makes history arrive as role/content dicts, matching
# how respond() builds its message list.
chatbot = gr.ChatInterface(respond, type="messages")