import os
import streamlit as st
from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
# ---------- Hugging Face API token ----------
# Read the token once. Guard against a missing env var: assigning None to
# os.environ raises TypeError, which would crash the app at import time.
HF_TOKEN = os.getenv("key")
if HF_TOKEN:
    # Both names are recognized by different huggingface/langchain versions.
    os.environ["HUGGINGFACEHUB_API_TOKEN"] = HF_TOKEN
    os.environ["HF_TOKEN"] = HF_TOKEN
# ---------- Page Configuration ----------
st.set_page_config(page_title="AI Visionary by Innomatics", page_icon="🧠", layout="centered")

# ---------- Custom CSS ----------
# Placeholder for custom styling; kept so CSS can be dropped in later.
st.markdown("""
""", unsafe_allow_html=True)

# ---------- UI Header ----------
# NOTE: the original header was an unterminated string literal (SyntaxError);
# rewritten as a triple-quoted HTML snippet, emoji restored from mojibake.
st.markdown("""
<h1 style="text-align:center;">AI Visionary by Innomatics 🧠</h1>
""", unsafe_allow_html=True)
st.markdown("### 🙏 Welcome to the AI Visionary by Innomatics 🤖")
st.markdown("""
This dashboard provides an AI mentor that gives instant, skill-adapted help
with Python, SQL, PowerBI, and data science to guide you through module doubts.
""")
st.markdown("## In which module do you have doubt?")
# ---------- Module Buttons ----------
# Emoji labels per course module; the dict keys double as mentor identifiers
# stored in session state. Emoji restored from mojibake in the original file.
modules = {
    "Python": "🐍",
    "SQL": "🗄️",
    "PowerBI": "📊",
    "Statistics": "📈",
    "Machine_Learning": "🤖",
    "Deep_Learning": "🧠",
}

# Render the buttons in a 3-column grid; clicking one selects that mentor.
cols = st.columns(3)
for i, (module, emoji) in enumerate(modules.items()):
    if cols[i % 3].button(f"{emoji} {module}", key=f"{module}_btn"):
        st.session_state.mentor_type = module
        st.session_state.mentor_emoji = emoji

# ---------- Session State Defaults ----------
# setdefault is a no-op when a button above already set the key this run.
st.session_state.setdefault("mentor_type", None)
st.session_state.setdefault("mentor_emoji", "🧠")
# ---------- Chat Interface ----------
# Shown only after the user picks a module via the buttons above.
if st.session_state.mentor_type:
    mentor = st.session_state.mentor_type
    emoji = st.session_state.mentor_emoji
    st.subheader(f"{emoji} {mentor.upper()} Mentor Chat")

    experience = st.slider("Your experience (in years):", 0, 20, 1)
    user_input = st.text_input("Ask your question:")
    output_container = st.empty()

    # (repo_id, inference provider) per module; unknown modules map to None.
    model_map = {
        "Python": ("meta-llama/Llama-3.1-8B-Instruct", "nebius"),
        "SQL": ("deepseek-ai/DeepSeek-R1", "nebius"),
        "PowerBI": ("deepseek-ai/DeepSeek-R1", "nebius"),
        "Statistics": ("meta-llama/Llama-3.2-1B-Instruct", "nebius"),
        "Machine_Learning": ("meta-llama/Llama-3.3-70B-Instruct", "nebius"),
        "Deep_Learning": ("meta-llama/Meta-Llama-3-70B-Instruct", "hyperbolic"),
    }
    repo_id, provider = model_map.get(mentor, (None, None))

    if repo_id:
        # Endpoint reads the HF token from the environment set at startup.
        model = HuggingFaceEndpoint(repo_id=repo_id, provider=provider, temperature=0.5, max_new_tokens=150)
        chat_model = ChatHuggingFace(llm=model)

        col1, col2 = st.columns(2)
        with col1:
            if st.button("🚀 Ask", key="ask_btn"):
                if user_input:
                    prompt = ChatPromptTemplate.from_messages([
                        SystemMessagePromptTemplate.from_template(
                            f"You are a helpful and experienced {mentor.upper()} mentor {emoji} assisting a learner with {experience} years of experience."
                        ),
                        HumanMessagePromptTemplate.from_template("{question}"),
                    ])
                    formatted_prompt = prompt.format_messages(question=user_input)
                    with st.spinner(f"{emoji} Mentor is thinking..."):
                        try:
                            response = chat_model.invoke(formatted_prompt)
                            # BUG FIX: st.empty() holds a single element, so two
                            # consecutive .markdown() calls overwrite each other
                            # and the user's question was never shown. Render
                            # question and answer in one call instead.
                            output_container.markdown(
                                f"**🙋 You:** {user_input}\n\n"
                                f"**{emoji} Mentor:** {response.content}"
                            )
                        except Exception as e:
                            output_container.error(f"❌ An error occurred: {str(e)}")
                else:
                    output_container.warning("⚠️ Please enter a question first!")
        with col2:
            if st.button("🧹 Clear", key="clear_btn"):
                output_container.empty()