sree4411 committed on
Commit
e70fc9d
·
verified ·
1 Parent(s): 73cb7dd

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +69 -66
src/streamlit_app.py CHANGED
@@ -3,101 +3,104 @@ import streamlit as st
3
  from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
4
  from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
5
 
6
- # API key setup
7
  os.environ['HUGGINGFACEHUB_API_TOKEN'] = os.getenv("key")
8
  os.environ['HF_TOKEN'] = os.getenv("key")
9
 
10
- # Streamlit config
11
- st.set_page_config(page_title="🧠 Multi-Domain Knowledge Assistant", page_icon="πŸŽ“", layout="wide")
12
- st.title("πŸŽ“ Multi-Topic Mentor Assistant")
13
- st.markdown("Get expert help in different data & tech domains. Choose a mentor below to get started:")
14
-
15
- # Mentor options with emojis and display names
16
- MENTORS = {
17
- "python": ("🐍 Python", "meta-llama/Llama-3.1-8B-Instruct", "nebius"),
18
- "machine_learning": ("πŸ€– ML", "deepseek-ai/DeepSeek-R1", "nebius"),
19
- "deep_learning": ("🧠 DL", "deepseek-ai/DeepSeek-R1", "nebius"),
20
- "stats": ("πŸ“Š Stats", "meta-llama/Llama-3.2-1B-Instruct", "nebius"),
21
- "data_analysis": ("πŸ“ˆ Data Analysis", "meta-llama/Llama-3.3-70B-Instruct", "nebius"),
22
- "sql_powerbi": ("πŸ—„οΈ SQL + Power BI", "meta-llama/Meta-Llama-3-70B-Instruct", "hyperbolic")
 
23
  }
24
 
25
- # Display custom styled buttons in columns
26
- st.markdown("### πŸ§‘β€πŸ« Choose a Mentor Domain")
27
- cols = st.columns(len(MENTORS))
28
- mentor_keys = list(MENTORS.keys())
29
- selected = None
 
 
 
30
 
31
- for i, col in enumerate(cols):
32
- with col:
33
- if st.button(MENTORS[mentor_keys[i]][0]):
34
- selected = mentor_keys[i]
35
- st.session_state["selected_mentor"] = selected
36
 
37
- # Use selection from session state if exists
38
- if "selected_mentor" in st.session_state:
39
- selected = st.session_state["selected_mentor"]
40
 
41
- if selected:
42
- mentor_name, repo_id, provider = MENTORS[selected]
43
- st.subheader(f"{mentor_name} Mentor πŸ’¬")
44
 
45
- # User experience slider and question input
46
- experience = st.slider("πŸ“… Your Experience (Years):", 0, 20, 1)
47
- user_input = st.text_input("πŸ“ Ask your question:")
48
 
49
- output_container = st.container()
 
 
 
50
 
51
- # Load the model
52
  model = HuggingFaceEndpoint(
53
  repo_id=repo_id,
54
  provider=provider,
55
  temperature=0.5,
56
- max_new_tokens=200,
57
  task="conversational"
58
  )
 
59
  chat_model = ChatHuggingFace(
60
  llm=model,
61
  repo_id=repo_id,
62
- provider=provider,
63
  temperature=0.5,
64
- max_new_tokens=200,
65
  task="conversational"
66
  )
67
 
68
- if "history" not in st.session_state:
69
- st.session_state["history"] = []
70
 
71
- # Ask button
72
- if st.button("🧠 Ask Mentor"):
73
- if user_input:
 
74
  prompt = ChatPromptTemplate.from_messages([
75
  SystemMessagePromptTemplate.from_template(
76
- f"You are a helpful and experienced {mentor_name} mentor assisting a learner with {experience} years of experience."
77
  ),
78
  HumanMessagePromptTemplate.from_template("{question}")
79
  ])
80
- formatted_prompt = prompt.format_messages(question=user_input)
81
 
82
  with st.spinner("Thinking..."):
83
- response = chat_model.invoke(formatted_prompt)
84
-
85
- # Save and show conversation
86
- st.session_state["history"].append(f"πŸ‘€ You: {user_input}")
87
- st.session_state["history"].append(f"🧠 Mentor: {response.content}")
88
-
89
-
90
- # Clear and download buttons
91
- col_clear, col_download = st.columns(2)
92
- with col_clear:
93
- if st.button("🧹 Clear Conversation"):
94
- st.session_state["history"] = []
95
- output_container.empty()
96
- with col_download:
97
- if st.session_state["history"]:
98
- st.download_button(
99
- label="⬇️ Download Chat",
100
- data="\n".join(st.session_state["history"]),
101
- file_name=f"{selected}_mentor_chat.txt",
102
- mime="text/plain"
103
- )
 
3
  from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
4
  from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
5
 
6
+ # Load Hugging Face token
7
  os.environ['HUGGINGFACEHUB_API_TOKEN'] = os.getenv("key")
8
  os.environ['HF_TOKEN'] = os.getenv("key")
9
 
10
+ # Page config
11
+ st.set_page_config(page_title="🧠 AI Mentor Hub", page_icon="πŸŽ“")
12
+ st.title("πŸŽ“ AI Mentor Hub - Learn Smarter, Faster!")
13
+
14
+ st.markdown("### πŸ” Select your mentor and ask any question:")
15
+
16
+ # Emoji mentor labels
17
+ mentor_labels = {
18
+ "python": "🐍 Python",
19
+ "machine_learning": "πŸ€– Machine Learning",
20
+ "deep_learning": "🧠 Deep Learning",
21
+ "stats": "πŸ“Š Statistics",
22
+ "data_analysis": "🧾 Data Analysis",
23
+ "sql_powerbi": "πŸ—ƒοΈ SQL & Power BI"
24
  }
25
 
26
+ mentor_configs = {
27
+ "python": {"repo_id": "meta-llama/Llama-3.1-8B-Instruct", "provider": "nebius"},
28
+ "machine_learning": {"repo_id": "deepseek-ai/DeepSeek-R1", "provider": "nebius"},
29
+ "deep_learning": {"repo_id": "deepseek-ai/DeepSeek-R1", "provider": "nebius"},
30
+ "stats": {"repo_id": "meta-llama/Llama-3.2-1B-Instruct", "provider": "nebius"},
31
+ "data_analysis": {"repo_id": "meta-llama/Llama-3.3-70B-Instruct", "provider": "nebius"},
32
+ "sql_powerbi": {"repo_id": "meta-llama/Meta-Llama-3-70B-Instruct", "provider": "hyperbolic"}
33
+ }
34
 
35
+ # Show options
36
+ mentor_choice = st.selectbox("Choose a mentor topic:", [""] + list(mentor_labels.keys()))
 
 
 
37
 
38
+ # Storage for conversation
39
+ if "conversation" not in st.session_state:
40
+ st.session_state.conversation = []
41
 
42
+ if mentor_choice:
43
+ label = mentor_labels[mentor_choice]
44
+ st.subheader(f"{label} Mentor Chat")
45
 
46
+ # Inputs
47
+ experience = st.slider("πŸ“… Your experience (years):", 0, 20, 1)
48
+ question = st.text_input("πŸ’¬ Ask your question:")
49
 
50
+ # Model setup
51
+ config = mentor_configs[mentor_choice]
52
+ repo_id = config["repo_id"]
53
+ provider = config["provider"]
54
 
 
55
  model = HuggingFaceEndpoint(
56
  repo_id=repo_id,
57
  provider=provider,
58
  temperature=0.5,
59
+ max_new_tokens=150,
60
  task="conversational"
61
  )
62
+
63
  chat_model = ChatHuggingFace(
64
  llm=model,
65
  repo_id=repo_id,
66
+ provider=provider, # FIXED: use correct provider dynamically
67
  temperature=0.5,
68
+ max_new_tokens=150,
69
  task="conversational"
70
  )
71
 
72
+ output_box = st.empty()
 
73
 
74
+ if st.button("🧠 Get Answer"):
75
+ if not question.strip():
76
+ st.warning("❗ Please enter a question.")
77
+ else:
78
  prompt = ChatPromptTemplate.from_messages([
79
  SystemMessagePromptTemplate.from_template(
80
+ f"You are a helpful and expert {mentor_choice.replace('_', ' ').title()} mentor. The user has {experience} years of experience. Answer clearly."
81
  ),
82
  HumanMessagePromptTemplate.from_template("{question}")
83
  ])
84
+ messages = prompt.format_messages(question=question)
85
 
86
  with st.spinner("Thinking..."):
87
+ response = chat_model.invoke(messages)
88
+
89
+ answer = response.content
90
+ output_box.markdown(f"πŸ‘€ **You:** {question}")
91
+ output_box.markdown(f"🧠 **Mentor:** {answer}")
92
+ st.session_state.conversation.append(f"You: {question}")
93
+ st.session_state.conversation.append(f"Mentor: {answer}")
94
+
95
+ if st.button("πŸ—‘οΈ Clear Chat"):
96
+ output_box.empty()
97
+ st.session_state.conversation = []
98
+
99
+ if st.session_state.conversation:
100
+ convo_text = "\n".join(st.session_state.conversation)
101
+ st.download_button(
102
+ "⬇️ Download Conversation",
103
+ data=convo_text,
104
+ file_name=f"{mentor_choice}_chat.txt",
105
+ mime="text/plain"
106
+ )