Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -4,10 +4,10 @@ from huggingface_hub import InferenceClient
|
|
| 4 |
|
| 5 |
client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
|
| 6 |
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
|
| 12 |
def respond(message, history):
|
| 13 |
messages = [{"role": "system", "content": "You are a sassy chatbot from the 1800s."}]
|
|
@@ -21,7 +21,7 @@ def respond(message, history):
|
|
| 21 |
max_tokens = 100,
|
| 22 |
stream = True,
|
| 23 |
):
|
| 24 |
-
token =
|
| 25 |
response += token
|
| 26 |
yield response
|
| 27 |
|
|
|
|
| 4 |
|
| 5 |
client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
|
| 6 |
|
| 7 |
+
def echo(message, history):
    """Return one randomly chosen magic-8-ball style reply.

    Both *message* and *history* are accepted to satisfy the chat-callback
    signature but are intentionally ignored.
    """
    # Fixed pool of canned answers the bot may respond with.
    answers = (
        "yes",
        "no",
        "silly ahh question",
        "not likely",
        "ask again",
        "absolutely",
        "YASSS",
    )
    return random.choice(answers)
|
| 11 |
|
| 12 |
def respond(message, history):
|
| 13 |
messages = [{"role": "system", "content": "You are a sassy chatbot from the 1800s."}]
|
|
|
|
| 21 |
max_tokens = 100,
|
| 22 |
stream = True,
|
| 23 |
):
|
| 24 |
+
token = delta.get("choices", [{}])[0].get("delta", {}).get("content", "")
|
| 25 |
response += token
|
| 26 |
yield response
|
| 27 |
|