File size: 9,383 Bytes
3cf547e
b303d68
3c64301
b303d68
0c6e54c
 
 
 
 
 
b303d68
0c6e54c
b303d68
 
 
460e302
0c6e54c
b303d68
0673389
b303d68
0673389
 
 
 
 
 
 
 
 
460e302
0673389
 
 
0c6e54c
 
 
 
 
 
 
 
fe04269
0673389
0c6e54c
0673389
0c6e54c
0673389
0c6e54c
96a68f2
 
 
 
 
 
 
 
0c6e54c
 
b303d68
0673389
0c6e54c
0ec28b0
4b3c5c3
f96cb94
0c6e54c
 
 
 
 
 
 
f96cb94
 
d66034d
efcd10f
f96cb94
fe04269
 
3c64301
 
 
0c6e54c
 
 
 
 
f96cb94
3c64301
f765230
02e5e45
f765230
023f854
02e5e45
 
 
 
 
 
 
 
 
 
 
978e90d
02e5e45
 
 
 
 
96a68f2
 
 
02e5e45
 
3c64301
 
 
 
 
023f854
efcd10f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fe04269
3c64301
 
 
 
023f854
efcd10f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fe04269
3c64301
 
 
 
023f854
efcd10f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fe04269
3c64301
 
 
 
023f854
efcd10f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fe04269
3c64301
 
 
 
023f854
6f2b6f0
efcd10f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fe04269
3c64301
 
 
 
023f854
efcd10f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fe04269
3c64301
f96cb94
0c6e54c
8233033
0ec28b0
0c6e54c
 
414c85b
0c6e54c
414c85b
0c6e54c
414c85b
0c6e54c
414c85b
0c6e54c
414c85b
0c6e54c
414c85b
0c6e54c
e010c31
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
import streamlit as st
import os
from langchain_huggingface import HuggingFaceEndpoint,HuggingFacePipeline,ChatHuggingFace

# Secure token loading: the Hugging Face access token is read from the
# "KEYS" environment variable so it is never hard-coded in the source.
hf_token = os.getenv("KEYS")
if not hf_token:
    raise ValueError("Environment variable 'KEYS' is not set.")
# huggingface_hub reads HF_TOKEN; the langchain Hugging Face integrations read
# HUGGINGFACEHUB_API_TOKEN. The original exported HUGGINGFACEHUB_API_KEY,
# which neither library consumes.
os.environ["HF_TOKEN"] = hf_token
os.environ["HUGGINGFACEHUB_API_TOKEN"] = hf_token

# Custom CSS: full-page background image with a dark overlay for readability,
# plus styling for the module-link buttons and the chat-reply bubble.
# Injected into the app below via st.markdown(..., unsafe_allow_html=True).
custom_css = """
<style>
    html, body, [data-testid="stAppViewContainer"] {
        background: url('https://wallpapercave.com/wp/wp2581376.jpg') no-repeat center center fixed;
        background-size: cover;
        font-family: Arial, sans-serif;
        color: #f5f5f5;
    }

    /* Optional overlay for readability */
    [data-testid="stAppViewContainer"]::before {
        content: "";
        position: fixed;
        top: 0;
        left: 0;
        width: 100%;
        height: 100%;
        background: rgba(0, 0, 0, 0.4); /* dark overlay */
        z-index: -1;
    }

    .button {
        display: inline-block;
        padding: 10px 20px;
        margin: 10px;
        font-size: 16px;
        text-decoration: none;
        color: white;
        border-radius: 5px;
        background-color:#E1D5FC;
        transition: background-color 0.3s ease;
    }

    .button:hover {
        background-color: #e68b7f;
    }

    .chat-reply {
            color: #00ffcc;
            background-color: #1e1e1e;
            padding: 10px;
            border-radius: 8px;
            margin-bottom: 10px;
        }
</style>
"""


# Inject the raw CSS; unsafe_allow_html is required for non-markdown content.
st.markdown(custom_css, unsafe_allow_html=True)

# Navigation block rendered on the landing page: one link per mentoring
# module. Each link sets the ?page= query parameter that the router at the
# bottom of this file dispatches on.
mentoring = """
<div>
    <a href="?page=python" class="button">Python</a>
    <a href="?page=eda" class="button">Data Analysis</a>
    <a href="?page=stats" class="button">Statistics</a>
    <a href="?page=ml" class="button">Machine Learning</a>
    <a href="?page=dl" class="button">Deep Learning</a>
    <a href="?page=excel" class="button">MS Excel</a>
    <a href="?page=power_bi" class="button">Power BI</a>
</div>
"""
# Chat model setup.
# Fixes vs. original:
#  - max_new_tokens was 10, which truncated every mentor answer after a few
#    words; raised so complete solutions fit.
#  - ChatHuggingFace only needs the wrapped endpoint; generation parameters
#    (repo_id, provider, temperature, max_new_tokens, task) belong on
#    HuggingFaceEndpoint, so the duplicated kwargs are removed.
llama_model = HuggingFaceEndpoint(
    repo_id="meta-llama/Llama-3.1-8B-Instruct",
    provider="novita",
    temperature=0.7,
    max_new_tokens=512,
    task="conversational",
)
llama = ChatHuggingFace(llm=llama_model)

deepseek_model = HuggingFaceEndpoint(
    repo_id="deepseek-ai/DeepSeek-R1",
    provider="novita",
    temperature=0.7,
    max_new_tokens=512,
    task="conversational",
)
deepseek = ChatHuggingFace(llm=deepseek_model)


from langchain.prompts import ChatPromptTemplate,SystemMessagePromptTemplate,HumanMessagePromptTemplate
# Page definitions
def main_page():
    """Landing page: greets the learner and renders the module nav links."""
    st.title("Welcome To Innomatics AI Mentor Support")
    st.write("Here is the list of modules we offer to support you in your learning journey...")
    # unsafe_allow_html is required because `mentoring` is raw HTML.
    st.markdown(mentoring, unsafe_allow_html=True)

def python_page():
    """Python mentoring page: gather experience level + doubt, answer via Llama."""
    st.title("Hi, Have doubts in Python? 🤔 Let's solve them together 😎🤩")

    # Bound the input to the advertised 0-20 range (the original accepted any
    # float, including negative values) and force whole years.
    exps = st.number_input(
        "Choose a mentor based on their experience: 0-20 years",
        min_value=0, max_value=20, step=1,
    )
    doubts = st.text_input("Explain your doubt")

    if doubts:
        # Persona prompt: the system message fixes the mentor's experience,
        # the human message carries the learner's doubt.
        messages = [
            SystemMessagePromptTemplate.from_template("You have an experience of {exp} years in Python."),
            HumanMessagePromptTemplate.from_template("Provide a clear solution for the following doubt: {doubt}")
        ]
        prompt = ChatPromptTemplate.from_messages(messages)
        formatted_prompt = prompt.format(exp=exps, doubt=doubts)

        # Query the LLM and render the reply inside the styled chat bubble.
        response = llama.invoke(formatted_prompt)
        st.markdown("### 🧠 Solution:")
        st.markdown(f'<div class="chat-reply">{response.content}</div>', unsafe_allow_html=True)
    
def eda_page():
    """Data Analysis mentoring page: gather experience + doubt, answer via DeepSeek."""
    st.title("Data Analysis Mentoring")
    # Bound the input to the advertised 0-20 range (the original accepted any
    # float, including negative values) and force whole years.
    exps = st.number_input(
        "Choose a mentor based on their experience: 0-20",
        min_value=0, max_value=20, step=1,
    )
    doubts = st.text_input("Explain your doubt")
    if doubts:
        # Persona prompt: system message sets mentor experience, human message
        # carries the learner's doubt.
        messages = [
            SystemMessagePromptTemplate.from_template("You have an experience of {exp} years in Data Analysis."),
            HumanMessagePromptTemplate.from_template("Provide a clear solution for the following doubt: {doubt}")
        ]
        prompt = ChatPromptTemplate.from_messages(messages)
        formatted_prompt = prompt.format(exp=exps, doubt=doubts)

        # Query the LLM and render the reply inside the styled chat bubble.
        response = deepseek.invoke(formatted_prompt)
        st.markdown("### 🧠 Solution:")
        st.markdown(f'<div class="chat-reply">{response.content}</div>', unsafe_allow_html=True)

def stats_page():
    """Statistics mentoring page: gather experience + doubt, answer via Llama."""
    st.title("Statistics Mentoring")
    # Bound the input to the advertised 0-20 range (the original accepted any
    # float, including negative values) and force whole years.
    exps = st.number_input(
        "Choose a mentor based on their experience: 0-20",
        min_value=0, max_value=20, step=1,
    )
    doubts = st.text_input("Explain your doubt")
    if doubts:
        # Persona prompt: system message sets mentor experience, human message
        # carries the learner's doubt.
        messages = [
            SystemMessagePromptTemplate.from_template("You have an experience of {exp} years in Statistical Analysis."),
            HumanMessagePromptTemplate.from_template("Provide a clear solution for the following doubt: {doubt}")
        ]
        prompt = ChatPromptTemplate.from_messages(messages)
        formatted_prompt = prompt.format(exp=exps, doubt=doubts)

        # Query the LLM and render the reply inside the styled chat bubble.
        response = llama.invoke(formatted_prompt)
        st.markdown("### 🧠 Solution:")
        st.markdown(f'<div class="chat-reply">{response.content}</div>', unsafe_allow_html=True)

def ml_page():
    """Machine Learning mentoring page: gather experience + doubt, answer via DeepSeek."""
    st.title("Machine Learning Mentoring")
    # Bound the input to the advertised 0-20 range (the original accepted any
    # float, including negative values) and force whole years.
    exps = st.number_input(
        "Choose a mentor based on their experience: 0-20",
        min_value=0, max_value=20, step=1,
    )
    doubts = st.text_input("Explain your doubt")
    if doubts:
        # Persona prompt: system message sets mentor experience, human message
        # carries the learner's doubt.
        messages = [
            SystemMessagePromptTemplate.from_template("You have an experience of {exp} years in ML."),
            HumanMessagePromptTemplate.from_template("Provide a clear solution for the following doubt: {doubt}")
        ]
        prompt = ChatPromptTemplate.from_messages(messages)
        formatted_prompt = prompt.format(exp=exps, doubt=doubts)

        # Query the LLM and render the reply inside the styled chat bubble.
        response = deepseek.invoke(formatted_prompt)
        st.markdown("### 🧠 Solution:")
        st.markdown(f'<div class="chat-reply">{response.content}</div>', unsafe_allow_html=True)

def dl_page():
    """Deep Learning mentoring page: gather experience + doubt, answer via Llama."""
    st.title("Deep Learning Mentoring")
    # Bound the input to the advertised 0-20 range (the original accepted any
    # float, including negative values) and force whole years.
    exps = st.number_input(
        "Choose a mentor based on their experience: 0-20",
        min_value=0, max_value=20, step=1,
    )
    doubts = st.text_input("Explain your doubt")
    if doubts:
        # Persona prompt: system message sets mentor experience, human message
        # carries the learner's doubt.
        messages = [
            SystemMessagePromptTemplate.from_template("You have an experience of {exp} years in Deep learning."),
            HumanMessagePromptTemplate.from_template("Provide a clear solution for the following doubt: {doubt}")
        ]
        prompt = ChatPromptTemplate.from_messages(messages)
        formatted_prompt = prompt.format(exp=exps, doubt=doubts)

        # Query the LLM and render the reply inside the styled chat bubble.
        response = llama.invoke(formatted_prompt)
        st.markdown("### 🧠 Solution:")
        st.markdown(f'<div class="chat-reply">{response.content}</div>', unsafe_allow_html=True)

def excel_page():
    """MS Excel mentoring page: gather experience + doubt, answer via DeepSeek."""
    st.title("MS Excel Mentoring")
    # Bound the input to the advertised 0-20 range (the original accepted any
    # float, including negative values) and force whole years.
    exps = st.number_input(
        "Choose a mentor based on their experience: 0-20",
        min_value=0, max_value=20, step=1,
    )
    doubts = st.text_input("Explain your doubt")
    if doubts:
        # Persona prompt: system message sets mentor experience, human message
        # carries the learner's doubt.
        messages = [
            SystemMessagePromptTemplate.from_template("You have an experience of {exp} years in Excel."),
            HumanMessagePromptTemplate.from_template("Provide a clear solution for the following doubt: {doubt}")
        ]
        prompt = ChatPromptTemplate.from_messages(messages)
        formatted_prompt = prompt.format(exp=exps, doubt=doubts)

        # Query the LLM and render the reply inside the styled chat bubble.
        response = deepseek.invoke(formatted_prompt)
        st.markdown("### 🧠 Solution:")
        st.markdown(f'<div class="chat-reply">{response.content}</div>', unsafe_allow_html=True)

def power_bi_page():
    """Power BI mentoring page: gather experience + doubt, answer via Llama."""
    st.title("Power BI Mentoring")
    # Bound the input to the advertised 0-20 range (the original accepted any
    # float, including negative values) and force whole years.
    exps = st.number_input(
        "Choose a mentor based on their experience: 0-20",
        min_value=0, max_value=20, step=1,
    )
    doubts = st.text_input("Explain your doubt")
    if doubts:
        # Persona prompt: system message sets mentor experience, human message
        # carries the learner's doubt.
        messages = [
            SystemMessagePromptTemplate.from_template("You have an experience of {exp} years in Power BI."),
            HumanMessagePromptTemplate.from_template("Provide a clear solution for the following doubt: {doubt}")
        ]
        prompt = ChatPromptTemplate.from_messages(messages)
        formatted_prompt = prompt.format(exp=exps, doubt=doubts)

        # Query the LLM and render the reply inside the styled chat bubble.
        response = llama.invoke(formatted_prompt)
        st.markdown("### 🧠 Solution:")
        st.markdown(f'<div class="chat-reply">{response.content}</div>', unsafe_allow_html=True)


# Routing: dispatch on the ?page= query parameter. Any unknown or missing
# value falls back to the landing page — same behavior as the original
# if/elif chain, expressed as a lookup table.
_PAGE_RENDERERS = {
    "python": python_page,
    "eda": eda_page,
    "stats": stats_page,
    "ml": ml_page,
    "dl": dl_page,
    "excel": excel_page,
    "power_bi": power_bi_page,
}

page = st.query_params.get("page", "main")
_PAGE_RENDERERS.get(page, main_page)()