# NOTE(review): the lines that were here ("Spaces:", "Build error", commit
# hashes, blame line numbers, "File size: 1,618 Bytes") were Hugging Face
# Spaces page residue captured during extraction, not Python source. They
# are removed/commented so the module parses.
import gradio as gr
import random
from huggingface_hub import InferenceClient
# Hosted inference client for the chat backend; swap the repo id to use a different LLM.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta") #change the LLM
def respond(message, history):
    """Stream a chat completion for the latest user message.

    Parameters:
        message: the user's newest message (str).
        history: prior turns in OpenAI-style messages format
            (list of {"role", "content"} dicts), or None/empty.

    Yields:
        The accumulated assistant response text after each streamed chunk,
        so the UI can render the reply progressively.
    """
    messages = [{"role": "system", "content": "You are a professional interviewer."}]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})
    response = ""
    # Use a distinct loop name: the original iterated as `message`, shadowing
    # the function parameter of the same name.
    for chunk in client.chat_completion(
        messages,
        max_tokens=100,  # change the length
        temperature=0.5,
        top_p=0.7,
        stream=True,
    ):
        token = chunk.choices[0].delta.content
        # delta.content can be None on empty/final chunks; `or ""` avoids
        # `TypeError: can only concatenate str (not "NoneType") to str`.
        response += token or ""
        yield response
def echo(message, history):
    """Return a random canned Magic-8-ball-style reply; inputs are ignored."""
    answers = (
        "Without a doubt",
        "Ask again later",
        "Yes, definitely",
        "Don't count on it",
    )
    return random.choice(answers)
# Dark-themed chat UI; the CSS targets the title div and message bubbles.
with gr.Blocks(css="""
#title { font-size: 28px; color: #00e676; text-align: center; }
.message.user { background-color: #1e88e5; color: white; border-radius: 12px; }
.message.bot { background-color: #424242; color: #eeeeee; border-radius: 12px; }
.chatbot { background-color: #121212; padding: 10px; border-radius: 16px; }
body { background-color: #111 !important; }
""") as demo:
    gr.Markdown("# <div id='title'>🤖 AI Study Buddy</div>")
    gr.ChatInterface(
        respond,
        type="messages",
        description="Ask me anything about AI, ML, or Python!",
        submit_btn="Send",
        stop_btn="Stop",
        chatbot=gr.Chatbot(show_label=False, elem_classes=["chatbot"])
    )
demo.launch(debug=True)
# BUGFIX: removed `chatbot.launch(debug=True)` — `chatbot` was never defined
# at module level, so that line raised NameError; `demo.launch` above already
# starts the app.