# ChatBuddy_AI / app.py
# Hugging Face Space by UmaKumpatla — "Update app.py", commit 7fe0cac (verified)
import os
import streamlit as st
from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
# ---------- Hugging Face API token ----------
# Read the secret once. Fail fast with a clear message if it is missing:
# assigning None into os.environ would otherwise raise an opaque TypeError.
HF_TOKEN = os.getenv("key")
if HF_TOKEN is None:
    raise RuntimeError("Hugging Face token not found: set the 'key' secret/environment variable.")
# Both names are honored by different huggingface/langchain versions.
os.environ["HUGGINGFACEHUB_API_TOKEN"] = HF_TOKEN
os.environ["HF_TOKEN"] = HF_TOKEN
# ---------- Page Configuration ----------
st.set_page_config(page_title="AI Visionary by Innomatics", page_icon="🧠", layout="centered")

# ---------- Custom CSS ----------
# Buttons: white with dark-red text, inverting on hover; all widget text is
# forced to black for readability on the light background.
_CUSTOM_CSS = """
<style>
.main {background-color: transparent; padding: 20px;}
.stButton>button {
background-color: white; color: #8B0000; border: 2px solid white;
border-radius: 10px; padding: 10px 20px; font-size: 18px;
font-weight: bold; width: 100%; transition: 0.3s ease-in-out;
}
.stButton>button:hover {
background-color: #8B0000; color: white; border: 2px solid white;
}
h1, h2, h3, p, div, span, label, input, textarea {
color: black !important;
}
</style>
"""
st.markdown(_CUSTOM_CSS, unsafe_allow_html=True)
# ---------- UI Header ----------
# Centered title plus a short description of what the dashboard offers.
_TITLE_HTML = "<h1 style='text-align: center'>AI Visionary by Innomatics🧠</h1>"
st.markdown(_TITLE_HTML, unsafe_allow_html=True)
st.markdown("### πŸ‘‹ Welcome to the AI Visionary by Innomatics πŸ€–")
st.markdown("""
This dashboard provides an AI mentor that gives instant, skill-adapted help
with Python, SQL, PowerBI, and data science to guide you through module doubts.
""")
st.markdown("## In which module do you have doubt?")
# ---------- Module Buttons ----------
# One emoji-labelled button per course module, laid out three per row.
modules = {
    "Python": "🐍",
    "SQL": "πŸ—ƒοΈ",
    "PowerBI": "πŸ“Š",
    "Statistics": "πŸ“ˆ",
    "Machine_Learning": "πŸ€–",
    "Deep_Learning": "🧠"
}

# ---------- Session State Defaults ----------
# Seed defaults first; a button click below simply overwrites them.
st.session_state.setdefault("mentor_type", None)
st.session_state.setdefault("mentor_emoji", "🧠")

button_columns = st.columns(3)
for idx, (module_name, module_emoji) in enumerate(modules.items()):
    column = button_columns[idx % 3]
    if column.button(f"{module_emoji} {module_name}", key=f"{module_name}_btn"):
        st.session_state.mentor_type = module_name
        st.session_state.mentor_emoji = module_emoji
# ---------- Chat Interface ----------
# Shown only after the user picks a module above.
if st.session_state.mentor_type:
    mentor = st.session_state.mentor_type
    emoji = st.session_state.mentor_emoji
    st.subheader(f"{emoji} {mentor.upper()} Mentor Chat")

    experience = st.slider("Your experience (in years):", 0, 20, 1)
    user_input = st.text_input("Ask your question:")
    output_container = st.empty()

    # Map each module to a (model repo_id, inference provider) pair.
    model_map = {
        "Python": ("meta-llama/Llama-3.1-8B-Instruct", "nebius"),
        "SQL": ("deepseek-ai/DeepSeek-R1", "nebius"),
        "PowerBI": ("deepseek-ai/DeepSeek-R1", "nebius"),
        "Statistics": ("meta-llama/Llama-3.2-1B-Instruct", "nebius"),
        "Machine_Learning": ("meta-llama/Llama-3.3-70B-Instruct", "nebius"),
        "Deep_Learning": ("meta-llama/Meta-Llama-3-70B-Instruct", "hyperbolic")
    }
    repo_id, provider = model_map.get(mentor, (None, None))

    if repo_id:
        model = HuggingFaceEndpoint(repo_id=repo_id, provider=provider, temperature=0.5, max_new_tokens=150)
        chat_model = ChatHuggingFace(llm=model)

        col1, col2 = st.columns(2)
        with col1:
            if st.button("πŸš€ Ask", key="ask_btn"):
                if user_input:
                    # System prompt adapts the mentor's tone to the learner's
                    # module and self-reported experience level.
                    prompt = ChatPromptTemplate.from_messages([
                        SystemMessagePromptTemplate.from_template(
                            f"You are a helpful and experienced {mentor.upper()} mentor {emoji} assisting a learner with {experience} years of experience."
                        ),
                        HumanMessagePromptTemplate.from_template("{question}")
                    ])
                    formatted_prompt = prompt.format_messages(question=user_input)
                    with st.spinner(f"{emoji} Mentor is thinking..."):
                        try:
                            response = chat_model.invoke(formatted_prompt)
                            # BUG FIX: st.empty() holds a single element, so two
                            # successive .markdown() calls left only the mentor's
                            # reply visible — the user's question was overwritten.
                            # Render both in one call; Clear below still works.
                            output_container.markdown(
                                f"**πŸ‘€ You:** {user_input}\n\n**{emoji} Mentor:** {response.content}"
                            )
                        except Exception as e:
                            output_container.error(f"❌ An error occurred: {str(e)}")
                else:
                    output_container.warning("⚠️ Please enter a question first!")
        with col2:
            if st.button("🧹 Clear", key="clear_btn"):
                output_container.empty()