Chat_with_Sheep / app.py
Sheepbon's picture
Update app.py
31ab8d7 verified
raw
history blame contribute delete
650 Bytes
from huggingface_hub import InferenceClient
import gradio as gr
import random
# Remote inference endpoint for the chat model — every call to
# client.chat_completion() is a network request; no weights are loaded locally.
client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
def respond(message, history):
    """Gradio chat handler: forward the conversation to the hosted model.

    Args:
        message: The user's latest message (str).
        history: Prior turns as a list of {"role", "content"} dicts
            (Gradio ``type="messages"`` format); None or empty on first turn.

    Returns:
        The assistant's reply text with surrounding whitespace stripped.
    """
    messages = [{"role": "system", "content": "You are a friendly chatbot! :)"}]
    if history:
        # Forward only the keys the chat API accepts — Gradio may attach
        # extras (e.g. "metadata") to history entries.
        messages.extend(
            {"role": m["role"], "content": m["content"]} for m in history
        )
    messages.append({"role": "user", "content": message})
    # temperature & top_p control randomness vs. focus of the sampled reply.
    response = client.chat_completion(
        messages, max_tokens=100, temperature=1.3, top_p=0.3
    )
    return response.choices[0].message.content.strip()
# Wrap the handler in a chat UI ("messages" format matches the dicts that
# respond() expects in its history argument) and start serving it.
demo = gr.ChatInterface(respond, type="messages")
demo.launch()