# Chat_bot / src/streamlit_app.py
# Source: sree4411's Hugging Face Space — commit df63dc7 (verified),
# "Update src/streamlit_app.py".
import os
import streamlit as st
from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
# Load the Hugging Face token from the "key" secret / environment variable.
# os.environ values must be str — assigning None (when the secret is absent)
# raises TypeError, so only export the token when it is actually set.
_hf_token = os.getenv("key")
if _hf_token:
    os.environ['HUGGINGFACEHUB_API_TOKEN'] = _hf_token
    os.environ['HF_TOKEN'] = _hf_token
# Page config — must be the first Streamlit call in the script.
st.set_page_config(page_title="🧠 AI Mentor Hub", page_icon="πŸŽ“")
st.title("πŸŽ“ AI Mentor Hub - Learn Smarter, Faster!")
st.markdown("### πŸ” Select your mentor and ask any question:")

# Quick-reference strip of the available mentor topics.
# FIX: labels corrected for consistency with mentor_labels below
# ("Data_Anaylasis" -> "Data Analysis", "sql and powerbi" -> "SQL & Power BI").
col1, col2, col3, col4, col5, col6 = st.columns(6)
with col1: st.write("🐍 Python")
with col2: st.write("πŸ€– ML")
with col3: st.write("🧠 DL")
with col4: st.write("πŸ“Š Stats")
with col5: st.write("🧾 Data Analysis")
with col6: st.write("πŸ—ƒοΈ SQL & Power BI")
# Display labels (with emoji) for each mentor topic; insertion order here
# drives the order of options in the topic selectbox.
mentor_labels = dict(
    python="🐍 Python",
    machine_learning="πŸ€– Machine Learning",
    deep_learning="🧠 Deep Learning",
    stats="πŸ“Š Statistics",
    data_analysis="🧾 Data Analysis",
    sql_powerbi="πŸ—ƒοΈ SQL & Power BI",
)

# Backend model + inference provider chosen for each mentor topic.
mentor_configs = dict(
    python=dict(repo_id="meta-llama/Llama-3.1-8B-Instruct", provider="nebius"),
    machine_learning=dict(repo_id="deepseek-ai/DeepSeek-R1", provider="nebius"),
    deep_learning=dict(repo_id="deepseek-ai/DeepSeek-R1", provider="nebius"),
    stats=dict(repo_id="meta-llama/Llama-3.2-1B-Instruct", provider="nebius"),
    data_analysis=dict(repo_id="meta-llama/Llama-3.3-70B-Instruct", provider="nebius"),
    sql_powerbi=dict(repo_id="meta-llama/Meta-Llama-3-70B-Instruct", provider="hyperbolic"),
)
# Topic picker — a leading empty option means "nothing selected yet".
topic_options = [""] + list(mentor_labels)
mentor_choice = st.selectbox("Choose a mentor topic:", topic_options)

# Persist the chat transcript across Streamlit reruns.
if "conversation" not in st.session_state:
    st.session_state.conversation = []
if mentor_choice:
    label = mentor_labels[mentor_choice]
    st.subheader(f"{label} Mentor Chat")

    # Inputs
    experience = st.slider("πŸ“… Your experience (years):", 0, 20, 1)
    question = st.text_input("πŸ’¬ Ask your question:")

    # Model setup. FIX: ChatHuggingFace is a thin chat wrapper around the
    # endpoint — repo_id/provider/sampling settings belong on the
    # HuggingFaceEndpoint only; passing them to ChatHuggingFace again was
    # redundant duplicated configuration.
    config = mentor_configs[mentor_choice]
    model = HuggingFaceEndpoint(
        repo_id=config["repo_id"],
        provider=config["provider"],
        temperature=0.5,
        max_new_tokens=150,
        task="conversational"
    )
    chat_model = ChatHuggingFace(llm=model)

    # Placeholder that holds the latest Q/A exchange.
    output_box = st.empty()

    if st.button("🧠 Get Answer"):
        if not question.strip():
            st.warning("❗ Please enter a question.")
        else:
            prompt = ChatPromptTemplate.from_messages([
                SystemMessagePromptTemplate.from_template(
                    f"You are a helpful and expert {mentor_choice.replace('_', ' ').title()} mentor. The user has {experience} years of experience. Answer clearly."
                ),
                HumanMessagePromptTemplate.from_template("{question}")
            ])
            messages = prompt.format_messages(question=question)
            with st.spinner("Thinking..."):
                response = chat_model.invoke(messages)
            answer = response.content
            # FIX: st.empty() holds a single element, so two successive
            # .markdown() calls overwrote each other and the "You:" line was
            # never visible. Render both lines in one markdown payload.
            output_box.markdown(
                f"πŸ‘€ **You:** {question}\n\n🧠 **Mentor:** {answer}"
            )
            st.session_state.conversation.append(f"You: {question}")
            st.session_state.conversation.append(f"Mentor: {answer}")

    if st.button("πŸ—‘οΈ Clear Chat"):
        output_box.empty()
        st.session_state.conversation = []

    # Offer the accumulated transcript as a plain-text download.
    if st.session_state.conversation:
        convo_text = "\n".join(st.session_state.conversation)
        st.download_button(
            "⬇️ Download Conversation",
            data=convo_text,
            file_name=f"{mentor_choice}_chat.txt",
            mime="text/plain"
        )