# EdstemBot_Demo / app.py
# Last change by noelstan99: "phi model update" (commit f2a47fd)
import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
# Page configuration — Streamlit requires this to be the first st.* call
# executed in the script.
st.set_page_config(
    page_title="EdStem Chatbot Demo",
    page_icon="🤖",
    layout="wide",
)
# Load model and tokenizer. @st.cache_resource ensures the (expensive)
# download/initialisation happens once per server process, not on every rerun.
@st.cache_resource
def load_model():
    """Load the fine-tuned causal LM and its tokenizer from the Hugging Face Hub.

    Returns:
        tuple: ``(tokenizer, model)`` — the ``AutoTokenizer`` and the
        ``AutoModelForCausalLM``, with the model switched to eval mode
        (dropout disabled) since it is used for inference only.
    """
    model_name = "ML-GT/CS4641-7641-finetuned-phi-3-mini-128k-instruct"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float32)
    model.eval()  # inference only: disable dropout / training-mode layers
    return tokenizer, model


tokenizer, model = load_model()
# App UI header. The model named here must match what load_model() actually
# loads — it is a fine-tuned Phi-3-mini (see the model id above), not LLaMA.
st.title("🤖 EdStem Chatbot Demo")
st.markdown(
    """
This is a demo of a fine-tuned Phi-3-mini model for EdStem course-related Q&A.
Enter your questions below and see how the chatbot responds!
"""
)
# Session state: the chat transcript must survive Streamlit's script reruns,
# so it lives in st.session_state (created only on the first run).
st.session_state.setdefault("messages", [])
# Chat interface helpers
def add_message(user_message, bot_message):
    """Record one question/answer exchange in the session's chat history."""
    exchange = {"user": user_message, "bot": bot_message}
    st.session_state.messages.append(exchange)
# Input box for user to type their message
user_input = st.text_input("Type your message:", placeholder="Ask a course-related question here...")

if user_input:
    # Tokenize the question. Passing the whole encoding (input_ids AND
    # attention_mask) to generate() avoids the HF "attention mask not set"
    # warning and keeps padded inputs handled correctly.
    inputs = tokenizer(user_input, return_tensors="pt")
    with torch.no_grad():  # inference only — skip autograd bookkeeping
        outputs = model.generate(
            **inputs,
            # max_new_tokens bounds the *response* length; the original
            # max_length=200 counted prompt tokens too, so long questions
            # left almost no budget for the answer.
            max_new_tokens=200,
            num_return_sequences=1,
            pad_token_id=tokenizer.eos_token_id,  # silence "no pad token" warning
        )
    # generate() returns prompt + continuation; slice off the prompt tokens so
    # the displayed answer does not echo the user's question back at them.
    prompt_len = inputs["input_ids"].shape[1]
    response = tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)
    # Add user input and bot response to the chat
    add_message(user_input, response)
    # Rerun so the new exchange is rendered by the history loop below.
    # NOTE(review): st.experimental_rerun() is deprecated and removed in
    # Streamlit >= 1.37 — switch to st.rerun() once the deployed version is
    # confirmed to support it.
    st.experimental_rerun()
# Render the accumulated chat history, one ruled section per exchange.
st.write("---")
for exchange in st.session_state.messages:
    st.markdown(f"**You:** {exchange['user']}")
    st.markdown(f"**Bot:** {exchange['bot']}")
    st.write("---")