Lesson_11 / app.py
ishaandtotoro's picture
Update app.py
f5aff27 verified
raw
history blame contribute delete
768 Bytes
from huggingface_hub import InferenceClient
import gradio as gr
import random
# Shared Inference API client: every chat-completion request from the
# respond() handler below is routed through this hosted model.
MODEL_ID = 'Qwen/Qwen2.5-72B-Instruct'
client = InferenceClient(MODEL_ID)
def respond(message, history):
    """Chat handler for gr.ChatInterface: build the conversation and query the model.

    Args:
        message: The latest user message (str).
        history: Prior turns as a list of {'role': ..., 'content': ...} dicts
            (supplied by ChatInterface with type="messages"); None or empty
            on the first turn.

    Returns:
        The model's reply text with surrounding whitespace stripped.
    """
    # Seed the conversation with a system prompt, then replay any prior turns.
    messages = [{'role': 'system', 'content': 'You are a friendly chatbot'}]
    if history:
        messages.extend(history)
    messages.append({'role': 'user', 'content': message})
    # temperature and top_p control randomness; max_tokens caps reply length.
    # NOTE(review): temperature=1.7 is unusually high while top_p=.3 is tight —
    # presumably intentional for varied-but-coherent replies; confirm the combo.
    response = client.chat_completion(messages, max_tokens=100, temperature=1.7, top_p=.3)
    return response['choices'][0]['message']['content'].strip()
# Wire the handler into a chat UI (type="messages" delivers history as
# role/content dicts) and start the local Gradio server.
chatbot = gr.ChatInterface(fn=respond, type="messages")
chatbot.launch()