|
|
| import os |
| import streamlit as st |
| from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate |
| from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace |
|
|
# Propagate the Hugging Face API token (stored in the "keys" env var) under
# both names the langchain/huggingface stack checks.
# BUG FIX: os.getenv returns None when "keys" is unset, and assigning None
# into os.environ raises TypeError — only set the variables when a token
# actually exists.
_hf_token = os.getenv("keys")
if _hf_token:
    os.environ['HUGGINGFACEHUB_API_TOKEN'] = _hf_token
    os.environ['HF_TOKEN'] = _hf_token
|
|
# Configure the browser tab (title + icon) and render the main heading.
# NOTE(review): the icon/heading emoji appear mojibake'd (UTF-8 mis-decoded);
# kept byte-identical here to preserve current rendering — confirm intended
# glyphs with the author.
st.set_page_config(
    page_title="AI-Powered Mentor Assistant",
    page_icon="π§ ",
)
st.title("π§ AI-Powered Mentor Assistant")
|
|
|
|
|
|
|
|
# Section header for the mentor picker.
st.markdown("### π **Select Your Mentor:**")

# Six equal-width columns, one per mentor-domain badge rendered below.
# BUG FIX: the original created the columns twice (the first tuple was
# immediately discarded) and re-imported streamlit mid-file even though it
# is already imported at the top of the file; both redundancies removed.
col1, col2, col3, col4, col5, col6 = st.columns(6)
|
|
# Render one badge (emoji + bold domain name) per mentor column, in order.
# Same markdown payload per column as before, just driven by a data table.
_badge_html = [
    "#### π<br><strong>Python</strong>",
    "#### π€<br><strong>Machine Learning</strong>",
    "#### π§ <br><strong>Deep Learning</strong>",
    "#### π<br><strong>Statistics</strong>",
    "#### π<br><strong>Data Analysis</strong>",
    "#### ποΈ<br><strong>SQL & Power BI</strong>",
]
for _col, _html in zip((col1, col2, col3, col4, col5, col6), _badge_html):
    with _col:
        st.markdown(_html, unsafe_allow_html=True)
|
|
# Styled prompt asking the user to pick a mentor domain.
st.markdown("""
<h4 style='color:#4CAF50; font-family:sans-serif;'>
π <strong>Select Your Mentor Domain:</strong>
</h4>
""", unsafe_allow_html=True)

# Domain picker; the leading empty option means "nothing selected yet".
mentor_type = st.selectbox(
    "",
    ["", "π Python", "π€ Machine Learning", "π§ Deep Learning", "π Statistics", "π Data Analysis", "ποΈ SQL & Power BI"]
)

# BUG FIX: experience/user_input were only bound inside the `if` below, but
# later code reads user_input unconditionally — a NameError on first render
# when no domain is selected yet. Provide safe defaults up front.
experience = 0
user_input = ""

if mentor_type:
    st.subheader(f"π§ {mentor_type.upper()} Mentor Chat")
    experience = st.slider("Your experience (in years):", 0, 20, 1)
    user_input = st.text_input("Ask your question:")
|
|
|
|
# Single placeholder that later handlers write the chat exchange into.
output_container = st.empty()


def _build_endpoint(repo_id, provider):
    """Return a conversational HuggingFaceEndpoint with the app's shared
    generation settings (temperature 0.5, 150 new tokens)."""
    return HuggingFaceEndpoint(
        repo_id=repo_id,
        provider=provider,
        temperature=0.5,
        max_new_tokens=150,
        task="conversational",
    )


# Domain-name suffix -> (repo_id, provider). Matched with endswith() so the
# emoji prefix on each selectbox label is irrelevant to the lookup.
_MODEL_BY_DOMAIN = [
    ("Python", ("meta-llama/Llama-3.1-8B-Instruct", "nebius")),
    ("Machine Learning", ("deepseek-ai/DeepSeek-R1", "nebius")),
    ("Deep Learning", ("deepseek-ai/DeepSeek-R1", "nebius")),
    ("Statistics", ("meta-llama/Llama-3.2-1B-Instruct", "nebius")),
    ("Data Analysis", ("meta-llama/Llama-3.3-70B-Instruct", "nebius")),
    ("SQL & Power BI", ("meta-llama/Meta-Llama-3-70B-Instruct", "hyperbolic")),
]

# BUG FIX: the original if/elif chain compared mentor_type against slugs
# like "python" / "machine_learning" that can never equal the selectbox
# labels, and an unconditional assignment then clobbered `model` anyway —
# so every selection got the same fallback endpoint. Dispatch on the
# label's text suffix instead, keeping the old fallback for the empty /
# unknown case so `chat_model` is always defined.
_repo_id, _provider = next(
    (spec for suffix, spec in _MODEL_BY_DOMAIN if mentor_type.endswith(suffix)),
    ("meta-llama/Meta-Llama-3-70B-Instruct", "hyperbolic"),
)
model = _build_endpoint(_repo_id, _provider)

# BUG FIX: ChatHuggingFace only wraps the endpoint; repo_id / provider /
# temperature / max_new_tokens / task are endpoint settings, not chat-model
# parameters, so they are no longer passed here.
chat_model = ChatHuggingFace(llm=model)
|
|
|
|
# Handle a question submission: build the prompt, query the model, and show
# the exchange in the shared output placeholder.
if st.button("Ask") and user_input:
    # System message frames the model as a mentor for the chosen domain and
    # the learner's stated experience; the human message carries the question.
    prompt = ChatPromptTemplate.from_messages([
        SystemMessagePromptTemplate.from_template(
            f"You are a helpful and experienced {mentor_type.upper()} mentor assisting a learner with {experience} years of experience."
        ),
        HumanMessagePromptTemplate.from_template("{question}")
    ])
    formatted_prompt = prompt.format_messages(question=user_input)

    with st.spinner("Mentor is thinking..."):
        response = chat_model.invoke(formatted_prompt)

    # BUG FIX: st.empty() holds a single element, so the original's two
    # .markdown() calls overwrote each other and the "You:" line was never
    # visible. Render both lines in one call instead.
    output_container.markdown(
        f"π€ You: {user_input}\n\nπ§ Mentor: {response.content}"
    )
|
|
# Reset control: clicking wipes whatever is currently shown in the shared
# output placeholder.
clear_requested = st.button("Clear Output")
if clear_requested:
    output_container.empty()