# (removed scrape artifacts: file-size header, git-blame hashes, line-number gutter)
import os
import streamlit as st
from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace

# Load the Hugging Face API token from the environment once.
# os.environ values must be strings — assigning the None that os.getenv()
# returns for a missing variable raises TypeError — so fail fast with a
# clear message instead.
_hf_token = os.getenv("key")
if _hf_token is None:
    raise RuntimeError("Environment variable 'key' (Hugging Face API token) is not set.")
os.environ['HUGGINGFACEHUB_API_TOKEN'] = _hf_token
os.environ['HF_TOKEN'] = _hf_token
# Page config
st.set_page_config(page_title="π§ AI Mentor Hub", page_icon="π")
st.title("π AI Mentor Hub - Learn Smarter, Faster!")
st.markdown("### π Select your mentor and ask any question:")

# One badge column per mentor topic. Labels fixed to match mentor_labels
# below ("Data_Anaylasis" was a typo; "sql and powerbi" was inconsistently
# cased).
_topic_badges = ["π Python", "π€ ML", "π§ DL", "π Stats", "π§Ύ Data Analysis", "ποΈ SQL & Power BI"]
for _col, _badge in zip(st.columns(6), _topic_badges):
    with _col:
        st.write(_badge)
# Emoji-decorated display label for each mentor topic key.
mentor_labels = dict(
    python="π Python",
    machine_learning="π€ Machine Learning",
    deep_learning="π§ Deep Learning",
    stats="π Statistics",
    data_analysis="π§Ύ Data Analysis",
    sql_powerbi="ποΈ SQL & Power BI",
)

# (topic, Hugging Face repo id, inference provider) for every mentor.
_MODEL_CHOICES = [
    ("python", "meta-llama/Llama-3.1-8B-Instruct", "nebius"),
    ("machine_learning", "deepseek-ai/DeepSeek-R1", "nebius"),
    ("deep_learning", "deepseek-ai/DeepSeek-R1", "nebius"),
    ("stats", "meta-llama/Llama-3.2-1B-Instruct", "nebius"),
    ("data_analysis", "meta-llama/Llama-3.3-70B-Instruct", "nebius"),
    ("sql_powerbi", "meta-llama/Meta-Llama-3-70B-Instruct", "hyperbolic"),
]

# Model + provider configuration backing each mentor topic.
mentor_configs = {
    topic: {"repo_id": repo, "provider": prov}
    for topic, repo, prov in _MODEL_CHOICES
}
# Topic picker; the leading empty entry means "nothing selected yet".
_topic_options = [""]
_topic_options.extend(mentor_labels)
mentor_choice = st.selectbox("Choose a mentor topic:", _topic_options)

# Conversation transcript kept across Streamlit reruns.
if "conversation" not in st.session_state:
    st.session_state["conversation"] = []
if mentor_choice:
    label = mentor_labels[mentor_choice]
    st.subheader(f"{label} Mentor Chat")

    # Inputs
    experience = st.slider("π Your experience (years):", 0, 20, 1)
    question = st.text_input("π¬ Ask your question:")

    # Model setup. All generation parameters belong to the endpoint;
    # ChatHuggingFace only wraps the LLM — passing repo_id/provider/
    # temperature/etc. to it again is at best redundant and at worst
    # fails its field validation.
    config = mentor_configs[mentor_choice]
    model = HuggingFaceEndpoint(
        repo_id=config["repo_id"],
        provider=config["provider"],
        temperature=0.5,
        max_new_tokens=150,
        task="conversational"
    )
    chat_model = ChatHuggingFace(llm=model)

    output_box = st.empty()

    if st.button("π§ Get Answer"):
        if not question.strip():
            st.warning("β Please enter a question.")
        else:
            prompt = ChatPromptTemplate.from_messages([
                SystemMessagePromptTemplate.from_template(
                    f"You are a helpful and expert {mentor_choice.replace('_', ' ').title()} mentor. The user has {experience} years of experience. Answer clearly."
                ),
                HumanMessagePromptTemplate.from_template("{question}")
            ])
            messages = prompt.format_messages(question=question)
            with st.spinner("Thinking..."):
                response = chat_model.invoke(messages)
                answer = response.content
                # st.empty() holds a single element, so two successive
                # markdown() calls would leave only the second visible;
                # render both lines in one call instead.
                output_box.markdown(
                    f"π€ **You:** {question}\n\nπ§ **Mentor:** {answer}"
                )
                st.session_state.conversation.append(f"You: {question}")
                st.session_state.conversation.append(f"Mentor: {answer}")

    if st.button("ποΈ Clear Chat"):
        output_box.empty()
        st.session_state.conversation = []

    # Offer the transcript for download once there is anything to save.
    if st.session_state.conversation:
        convo_text = "\n".join(st.session_state.conversation)
        st.download_button(
            "β¬οΈ Download Conversation",
            data=convo_text,
            file_name=f"{mentor_choice}_chat.txt",
            mime="text/plain"
        )