# AI_Chatbot / app.py
#import libraries here
import gradio as gr
import random
from huggingface_hub import InferenceClient
# AI API being used: Hugging Face Inference client for the gpt-oss-20b model.
client = InferenceClient("openai/gpt-oss-20b")
# NOTE(review): the former module-level `response = ""` was dead code — respond()
# builds its own local `response`, so the global was never read or written.
#defining role of AI and user
def respond(message, history):
    """Stream a chat reply for the Gradio ChatInterface.

    Parameters
    ----------
    message : str
        The user's newest message.
    history : list[dict] | None
        Prior turns in OpenAI-style ``{"role": ..., "content": ...}`` form
        (Gradio passes this when ``type="messages"``).

    Yields
    ------
    str
        The accumulated assistant reply so far, so the UI shows text
        growing token by token.
    """
    system_message = "You are acting like a comforting, guiding parent helping their child navigate academia."
    messages = [{"role": "system", "content": system_message}]
    if history:
        messages.extend(history)  # keep adding history
    messages.append({"role": "user", "content": message})
    # Single streaming call; max_tokens caps the length of the reply.
    # (The original code made two API calls, discarded the streamed one,
    # and concatenated strings onto a generator object — a TypeError.)
    stream = client.chat_completion(
        messages, temperature=0.8, stream=True, max_tokens=100
    )
    response = ""
    for chunk in stream:
        token = chunk.choices[0].delta.content
        if token:  # final/keep-alive chunks may carry no content
            response += token
            yield response
# Build and launch the chat UI in one chained expression: ChatInterface wraps
# the respond() generator with a message-style history view, a title, and the
# Taithrah/Minimal theme, then launch() starts the app server.
chatbot = gr.ChatInterface(
    respond,
    type="messages",
    title="AI Chatbot",
    theme="Taithrah/Minimal",
)
chatbot.launch()
#You may run into errors when you're trying different models. To see the error messages, set debug to True in launch()