Wfafa committed on
Commit
7b26b0f
·
verified ·
1 Parent(s): 997b150

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +237 -61
app.py CHANGED
@@ -1,79 +1,255 @@
1
- import gradio as gr
2
- from huggingface_hub import InferenceClient
3
  import os
 
 
4
  import json
5
 
6
-
7
- # Initialize Hugging Face Inference Client
8
- client = InferenceClient(api_key="your_huggingface_api_key_here")
9
-
10
- # ๐ŸŽฏ Study-only question filter
11
- def is_study_related(question):
12
- educational_keywords = [
13
- "math", "science", "ict", "english", "chemistry", "physics", "biology",
14
- "grammar", "essay", "study", "lesson", "equation", "formula", "computer",
15
- "programming", "AI", "machine learning", "technology", "education",
16
- "subject", "exam", "revision", "teacher", "learning", "school", "topic"
17
- ]
18
-
19
- for word in educational_keywords:
20
- if word.lower() in question.lower():
21
- return True
22
- return False
23
-
24
- # Memory save/load
25
- def save_memory(history):
26
- with open("chat_memory.json", "w") as f:
27
- json.dump(history, f)
28
 
29
  def load_memory():
30
- if os.path.exists("chat_memory.json"):
31
- with open("chat_memory.json", "r") as f:
32
  return json.load(f)
33
  return []
34
 
35
- # Chat logic
36
- def chat_with_model(message, history):
 
 
 
 
 
 
 
 
 
 
 
 
 
37
  if not message:
38
  return history, history
39
 
40
- # ๐Ÿšซ Block unnecessary/off-topic questions
41
- if not is_study_related(message):
42
- reply = "๐Ÿšซ I'm sorry, but I can only answer study-related questions. Let's focus on learning!"
43
- history.append((message, reply))
 
44
  save_memory(history)
45
  return history, history
46
 
47
- # Append user message to history
48
- history.append((message, ""))
49
- save_memory(history)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
 
51
- # Generate AI response
52
- response = client.text_generation(
53
- model="mistralai/Mixtral-8x7B-Instruct-v0.1",
54
- prompt=message,
55
- max_new_tokens=300,
56
- temperature=0.7
57
- )
58
-
59
- reply = response
60
- history[-1] = (message, reply)
61
- save_memory(history)
62
-
63
- return history, history
64
-
65
- # Load existing memory
66
- memory = load_memory()
67
 
68
- # Interface
69
- with gr.Blocks(theme="soft") as demo:
70
- gr.Markdown("## ๐Ÿค– EduAI โ€” Where Curiosity Meets Knowledge")
71
- chatbot = gr.Chatbot(label="EduAI Learning Assistant", value=memory)
72
- msg = gr.Textbox(label="Ask EduAI a study question...")
73
- clear = gr.Button("Clear Chat")
74
 
75
- msg.submit(chat_with_model, [msg, chatbot], [chatbot, chatbot])
76
- clear.click(lambda: [], None, chatbot)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77
 
78
- # Launch app
79
- demo.launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import json
import os
import re

import gradio as gr
import requests
 
6
+ # ๐ŸŒ Web search function
7
+ def search_web(query):
8
+ try:
9
+ url = "https://api.duckduckgo.com/"
10
+ params = {"q": query, "format": "json", "no_html": 1, "skip_disambig": 1}
11
+ response = requests.get(url, params=params)
12
+ data = response.json()
13
+
14
+ if data.get("AbstractText"):
15
+ return data["AbstractText"]
16
+ elif data.get("RelatedTopics"):
17
+ topics = [t.get("Text", "") for t in data["RelatedTopics"] if "Text" in t]
18
+ return " ".join(topics[:3])
19
+ else:
20
+ return "No useful information found."
21
+ except Exception as e:
22
+ return f"Search error: {e}"
23
+
24
# 🧠 Memory setup
HF_TOKEN = os.getenv("HF_TOKEN")  # Hugging Face API token; None when the env var is unset
MEMORY_FILE = "memory.json"       # JSON file that persists chat history across restarts
 
27
 
28
def load_memory(path=None):
    """Return the chat history persisted in *path* (defaults to MEMORY_FILE).

    Returns an empty list when the file is missing or unreadable.  The
    try/except matters because this runs at import time: a corrupted or
    half-written memory file must not prevent the app from starting.
    """
    if path is None:
        path = MEMORY_FILE
    if os.path.exists(path):
        try:
            with open(path, "r") as f:
                return json.load(f)
        except (json.JSONDecodeError, OSError):
            # Corrupt or partially written file: start with a fresh history.
            return []
    return []
33
 
34
def save_memory(memory, path=None):
    """Persist *memory* (the chat history) as JSON to *path*.

    *path* defaults to MEMORY_FILE, so all existing callers are
    unchanged; the parameter exists to mirror load_memory and to make
    the function testable without touching the real memory file.
    """
    if path is None:
        path = MEMORY_FILE
    with open(path, "w") as f:
        json.dump(memory, f)
37
+
38
# Restore any previously saved conversation at import time.
# NOTE(review): `memory` is loaded here but the Chatbot component below is
# not seeded with it — confirm whether past history should be displayed.
memory = load_memory()
39
+
40
# -----------------------
# Chat function (original behavior)
# returns (history, history) to match previous usage
# -----------------------
def chat_with_model(message, history, context):
    """Handle one chat turn and return the updated history twice.

    Parameters
    ----------
    message : str
        The user's input; a "search <query>" prefix triggers web search mode.
    history : list
        Prior (user, assistant) tuples; coerced to a list if malformed.
    context : str
        Sidebar mode banner text; currently unused by the model call but
        kept so the Gradio event wiring stays unchanged.

    Returns
    -------
    tuple
        (history, history) — duplicated because callers wire two outputs.
    """
    if not isinstance(history, list):
        history = []

    # prevent empty messages
    if not message:
        return history, history

    # 🌐 Web search mode
    if message.lower().startswith("search "):
        query = message[7:]
        search_result = search_web(query)
        history.append((message, f"🔎 Here's what I found online:\n\n{search_result}"))
        save_memory(history)
        return history, history

    # 🧠 Build conversation, starting with the fixed system persona.
    conversation = [{"role": "system", "content": (
        "You are EduAI — an educational AI assistant created by Wafa Fazly "
        "from Fathima Muslim Ladies College. "
        "You help students learn subjects such as Math, Science, English, and IT. "
        "EduAI runs on the model 'openai/gpt-oss-safeguard-20b:groq', which was originally "
        "trained by OpenAI. Always answer truthfully when asked about your creation."
        "never chat unnecessary conversations, and don't talk anything apart from education"
    )}]

    # Convert the last five history entries to chat messages (keeps the
    # old tuple-based history format working alongside dict messages).
    for past in history[-5:]:
        # expect (user_message, bot_reply)
        if isinstance(past, tuple) and len(past) == 2:
            conversation.append({"role": "user", "content": past[0]})
            conversation.append({"role": "assistant", "content": past[1]})
        elif isinstance(past, dict):
            conversation.append(past)

    conversation.append({"role": "user", "content": message})

    # 🚀 Send to Hugging Face model
    try:
        response = requests.post(
            "https://router.huggingface.co/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {HF_TOKEN}",
                "Content-Type": "application/json"
            },
            json={
                "model": "openai/gpt-oss-safeguard-20b:groq",
                "messages": conversation
            },
            timeout=60
        )
        # Surface HTTP errors (401, 429, 5xx, ...) into the except branch
        # with a clear message instead of failing later with a KeyError
        # when the error payload has no "choices".
        response.raise_for_status()

        data = response.json()
        reply = data["choices"][0]["message"]["content"]

        # 🧮 Clean up math formatting: bold "Step N:" headings with
        # *balanced* markers.  The previous blanket replace(":", ":**")
        # mangled every colon in the reply (URLs, "Note:", LaTeX) and
        # "Step" -> "**Step" opened bold spans that were never closed.
        reply = re.sub(r"\bStep (\d+):", r"\n\n**Step \1:**", reply)
        # Give LaTeX display blocks surrounding blank lines so the
        # Chatbot's latex_delimiters render them as display math.
        reply = reply.replace("\\[", "\n\n\\[")
        reply = reply.replace("\\]", "\\]\n\n")

        history.append((message, reply))
        save_memory(history)
        # IMPORTANT: return a pair (history, history) because other code expects two outputs
        return history, history

    except Exception as e:
        print("Error:", e)
        history.append((message, "😅 EduAI is having trouble connecting right now. Please try again later!"))
        # Persist the failure notice too, matching the success path.
        save_memory(history)
        return history, history
 
 
117
 
118
# 📘 Sidebar context update
def update_context(choice):
    """Return the banner markdown for the currently selected study mode."""
    if choice:
        return f"📘 **You selected {choice} mode.** Ask anything related to this topic!"
    return "📘 **You are in General Mode.** Ask EduAI anything about your studies!"
123
+
124
# 🧹 Clear chat memory
def clear_memory(path=None):
    """Delete the persisted chat history file and reset the UI.

    *path* defaults to MEMORY_FILE so the existing no-argument caller is
    unchanged.  Returns an empty history plus a confirmation banner,
    matching the (chatbot, context_display) outputs wired in the UI.
    """
    if path is None:
        path = MEMORY_FILE
    if os.path.exists(path):
        os.remove(path)
    return [], "🧹 Chat memory cleared! Start fresh."
129
+
130
# -----------------------
# Pause / Send wrappers (FIXED)
# -----------------------

# send handler that respects paused state (ALWAYS returns (chat_history, textbox_clear))
def send_handler(message, history, context, paused_state):
    """Route a Send click: block with a hint while paused, otherwise
    delegate to chat_with_model and clear the textbox."""
    chat = history if isinstance(history, list) else []

    if paused_state:
        # Paused: never touch the model, just show a friendly reminder.
        chat.append((None, "⏸️ Chat is paused. Click Resume to continue."))
        return chat, ""

    # Running: chat_with_model returns (history, history); unpack the
    # first element defensively in case a caller changes that contract.
    result = chat_with_model(message, chat, context)
    if isinstance(result, tuple) and len(result) == 2:
        chat = result[0]
    else:
        chat = result
    return chat, ""
150
+
151
+
152
# toggle pause/resume and update UI (state + chat + button text + send button disabled)
def toggle_pause(paused_state, history):
    """Flip the pause state and emit the matching UI updates.

    Returns (new_state, history, pause_button_update, send_button_update)
    to match the outputs wired on the Pause button click.
    """
    new_state = not bool(paused_state)
    if not isinstance(history, list):
        history = []

    # gr.update(...) replaces gr.Button.update(...), which was removed in
    # Gradio 4 (calling it raises AttributeError).  Button enabling is
    # controlled by `interactive` — gr.Button has no `disabled` property.
    if new_state:
        # now paused
        history.append((None, "⏸️ Chat paused. Send is disabled."))
        pause_btn_update = gr.update(value="▶ Resume")
        send_btn_update = gr.update(interactive=False)
    else:
        # resumed
        history.append((None, "▶️ Chat resumed. You can send messages now."))
        pause_btn_update = gr.update(value="⏸ Pause")
        send_btn_update = gr.update(interactive=True)

    # return new pause state, updated chat history, and two UI updates (pause button & send button)
    return new_state, history, pause_btn_update, send_btn_update
171
+
172
# -----------------------
# Build UI (unchanged layout; pause added)
# -----------------------
with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
    # Header banner shown above the whole app.
    gr.Markdown(
        """
        # 🎓 **EduAI — Your Smart Study Companion**
        Welcome to **EduAI**, your friendly study assistant! 💬
        Get help in **Science, ICT, English, Mathematics**, and more.
        """
    )

    with gr.Row():
        # Left sidebar: subject / planner / language pickers and settings.
        with gr.Column(scale=1, min_width=230):
            gr.Markdown("### 🧭 **Main Menu**")

            with gr.Accordion("📚 Subject Tutor", open=False):
                subj = gr.Radio(
                    ["Science 🧪", "ICT 💻", "English 📘", "Mathematics ➗"],
                    label="Choose a subject"
                )

            with gr.Accordion("🗓 Study Planner", open=False):
                planner = gr.Radio(
                    ["View Plan 📅", "Add Task ✍️", "Study Tips 💡"],
                    label="Planner Options"
                )

            with gr.Accordion("🌍 Languages", open=False):
                lang = gr.Radio(
                    ["Learn Sinhala 🇱🇰", "Learn Tamil 🇮🇳", "Learn English 🇬🇧", "Learn Spanish 🇪🇸"],
                    label="Language Options"
                )

            with gr.Accordion("⚙️ Settings", open=False):
                clear_btn = gr.Button("🧹 Clear Memory")

            with gr.Accordion("👩‍🎓 About", open=False):
                gr.Markdown(
                    """
                    EduAI was designed and fine-tuned by **Wafa Fazly**,
                    a passionate Sri Lankan student 👩‍💻
                    to help learners explore **Science, ICT, English, and more** —
                    in a smart and friendly way! 🌟
                    """
                )

        # Main column: mode banner, chat window, input box, and controls.
        with gr.Column(scale=4):
            context_display = gr.Markdown("📘 **You are in General Mode.** Ask EduAI anything about your studies!")
            chatbot = gr.Chatbot(
                label="💬 EduAI Chat Window",
                height=450,
                render_markdown=True,
                bubble_full_width=False,
                # Render $$...$$ and \[...\] spans as display math; matches
                # the "\\[" spacing inserted by chat_with_model.
                latex_delimiters=[
                    {"left": "$$", "right": "$$", "display": True},
                    {"left": "\\[", "right": "\\]", "display": True}
                ]
            )
            msg = gr.Textbox(
                label="💭 Type your question here...",
                placeholder="Ask EduAI anything about your studies..."
            )

            with gr.Row():
                send = gr.Button("✨ Send Message")
                pause = gr.Button("⏸ Pause", variant="secondary")
            # state to keep track of pause (False = running, True = paused)
            pause_state = gr.State(False)

    # 🪄 Event handlers
    subj.change(update_context, inputs=subj, outputs=context_display)
    planner.change(update_context, inputs=planner, outputs=context_display)
    lang.change(update_context, inputs=lang, outputs=context_display)

    # send now uses send_handler and respects pause_state; outputs: chatbot and clears textbox
    send.click(send_handler, inputs=[msg, chatbot, context_display, pause_state], outputs=[chatbot, msg])

    clear_btn.click(clear_memory, outputs=[chatbot, context_display])

    # pause toggles pause_state, updates chatbot with a message, updates pause button label and disables/enables send
    pause.click(toggle_pause, inputs=[pause_state, chatbot], outputs=[pause_state, chatbot, pause, send])

# NOTE(review): launch() placed at module top level per Gradio convention;
# the scraped diff lost indentation — confirm it was not inside the Blocks.
iface.launch()