Spaces:
Sleeping
Sleeping
Connecting to zephyr LLM
Browse files
app.py
CHANGED
|
@@ -1,14 +1,26 @@
|
|
| 1 |
import gradio as gr
|
|
|
|
| 2 |
import random
|
| 3 |
|
|
|
|
| 4 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
|
| 6 |
-
def
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 12 |
|
| 13 |
chatbot.launch()
|
| 14 |
|
|
|
|
| 1 |
import gradio as gr
|
| 2 |
+
from huggingface_hub import InferenceClient
|
| 3 |
import random
|
| 4 |
|
| 5 |
+
# Hosted-inference client for the zephyr-7b-beta chat model.
# NOTE(review): this calls the Hugging Face Inference API over the network at
# request time — an HF token may be required depending on quota; verify.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
|
| 6 |
|
| 7 |
+
#def echo(message, history):
|
| 8 |
+
#choices = ["Ask me later", "Not today, sweetie", "One day we'll know", "Indeed!", "Nahhhh the vibes off cuh", "Cro don't do it", "Oh skibidi", "Ts pmo icl", "All of this is fake", "YES GET IT GURL"]
|
| 9 |
+
#value = random.choice(choices)
|
| 10 |
+
#return value
|
| 11 |
|
| 12 |
+
def respond(message, history):
    """Generate a chat reply for *message* using the zephyr model.

    Parameters
    ----------
    message : str
        The user's latest message.
    history : list[dict] | None
        Prior turns as OpenAI-style {"role": ..., "content": ...} dicts,
        as supplied by gr.ChatInterface(type="messages").

    Returns
    -------
    str
        The assistant's reply text, stripped of surrounding whitespace.
    """
    # Fixed: original read `messages [{...}]` — the missing `=` was a
    # SyntaxError, so the app could not start.
    messages = [{"role": "system", "content": "You are a friendly chatbot."}]
    if history:
        # History is already a list of {"role", "content"} dicts, so it can
        # be spliced into the prompt directly.
        messages.extend(history)
    # Fixed: original was `append("{"role": ...})` — the stray quote before
    # the dict literal was a second SyntaxError.
    messages.append({"role": "user", "content": message})
    # Fixed: max_tokens=0 asked the model for a zero-length completion, so
    # the bot could never answer. 512 allows a full-length chat reply.
    response = client.chat_completion(messages, max_tokens=512)
    # huggingface_hub response objects support dict-style access; keep the
    # original indexing form. (Debug print of the raw response removed.)
    return response["choices"][0]["message"]["content"].strip()
|
| 21 |
+
|
| 22 |
+
# Wire respond() into a chat UI. type="messages" makes Gradio pass and
# expect OpenAI-style {"role", "content"} dicts for the conversation history.
chatbot = gr.ChatInterface(respond, type="messages")

# Start the Gradio server; this call blocks until the app is stopped.
chatbot.launch()
|
| 26 |
|