Wfafa committed on
Commit
c243479
Β·
verified Β·
1 Parent(s): 2935bc3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +116 -105
app.py CHANGED
@@ -2,142 +2,153 @@ import os
2
  import gradio as gr
3
  import requests
4
  import json
 
5
 
6
- # 🌍 Hugging Face DeepSeek Model Token
7
  HF_TOKEN = os.getenv("HF_TOKEN")
8
 
9
# 🧠 Persistent chat memory, stored as JSON next to the app.
MEMORY_FILE = "memory.json"

def load_memory():
    """Return the saved conversation history, or [] when none exists.

    A corrupted or unreadable memory file is treated as empty instead of
    crashing the app at import time (this runs at module load below).
    """
    if os.path.exists(MEMORY_FILE):
        try:
            with open(MEMORY_FILE, "r", encoding="utf-8") as f:
                return json.load(f)
        except (json.JSONDecodeError, OSError):
            # fix: a truncated/garbled memory.json previously raised and
            # prevented the whole app from starting.
            return []
    return []

def save_memory(memory):
    """Persist the conversation history to MEMORY_FILE as JSON."""
    with open(MEMORY_FILE, "w", encoding="utf-8") as f:
        json.dump(memory, f)

memory = load_memory()
22
-
23
- # πŸ’¬ Chat function with Hugging Face
24
- def ai_response(message, history, context, file_input=None):
25
- if not isinstance(history, list):
26
  history = []
27
 
28
- if file_input:
29
- message += f"\n\nπŸ“Ž (User uploaded file '{file_input.name}')"
30
-
31
- conversation = [
32
- {"role":"system", "content":(
33
- "You are EduAI, a multilingual educational AI assistant. "
34
- "Explain answers like a professional tutor, use Markdown/LaTeX for math, "
35
- "and keep answers neat and student-friendly."
36
- )}
37
- ]
38
 
39
  for past_user, past_bot in history[-5:]:
40
- conversation.append({"role":"user","content":past_user})
41
- conversation.append({"role":"assistant","content":past_bot})
42
- conversation.append({"role":"user","content":message})
 
43
 
44
  try:
45
  response = requests.post(
46
- "https://router.huggingface.co/v1/chat/completions",
47
- headers={"Authorization": f"Bearer {HF_TOKEN}", "Content-Type":"application/json"},
48
- json={"model":"deepseek-ai/DeepSeek-V3.2-Exp:novita", "messages":conversation}
 
 
 
 
 
 
49
  )
50
  data = response.json()
51
  reply = data["choices"][0]["message"]["content"]
52
  history.append((message, reply))
53
- save_memory(history)
54
- return history, history
55
  except Exception as e:
56
  history.append((message, f"❌ Error contacting model: {e}"))
57
- return history, history
58
-
59
# 🧠 MCQ generator (dynamic)
def generate_mcq(topic):
    """Ask the model for a 3-question quiz on *topic* and return it as HTML.

    Returns an "❌ Error ..." string on any request or parsing failure so
    the Gradio HTML output always receives something displayable.
    """
    prompt = f"Create a 3-question multiple-choice quiz about '{topic}' with 4 options each and indicate the correct answer."
    try:
        response = requests.post(
            "https://router.huggingface.co/v1/chat/completions",
            headers={"Authorization": f"Bearer {HF_TOKEN}", "Content-Type": "application/json"},
            json={"model": "deepseek-ai/DeepSeek-V3.2-Exp:novita", "messages": [{"role": "user", "content": prompt}]},
            timeout=60,  # fix: avoid hanging the UI on a stalled request
        )
        response.raise_for_status()  # fix: turn HTTP errors into the ❌ message below
        data = response.json()
        reply = data["choices"][0]["message"]["content"]
        # One <br>-terminated line per reply line, inside a styled container
        # (the original loop's enumerate() index was unused).
        body = "".join(f"{line}<br>" for line in reply.split("\n"))
        return f"<div style='color:white; font-family:Poppins, sans-serif;'>{body}</div>"
    except Exception as e:
        return f"❌ Error generating MCQs: {e}"
78
-
79
- # πŸ“˜ Sidebar context
80
- def update_context(choice):
81
- if not choice:
82
- return "πŸ“˜ **You are in General Mode.** Ask EduAI anything about your studies!"
83
- return f"πŸ“˜ **You selected {choice} mode.** Ask anything related to this topic!"
84
-
85
# 🧹 Clear chat
def clear_memory():
    """Delete the persisted memory file (if any) and reset the chat UI."""
    # EAFP: attempt the removal and ignore a missing file, instead of
    # checking existence first.
    try:
        os.remove(MEMORY_FILE)
    except FileNotFoundError:
        pass
    return [], "🧹 Chat memory cleared! Start fresh."
90
-
91
# 🎨 UI Custom CSS
custom_css = """
.gradio-container {background: #1e1e2f; font-family:'Poppins', sans-serif; color:white;}
#chatbot {background: rgba(30,30,47,0.8); border-radius: 16px; backdrop-filter: blur(10px);}
button.primary {background: linear-gradient(90deg,#7c3aed,#a855f7); color:white; border:none; border-radius:8px;}
button.primary:hover {background: linear-gradient(90deg,#6d28d9,#9333ea);}
input[type=checkbox] {accent-color:#a855f7;}
"""

def _history_to_messages(history):
    # gr.Chatbot(type="messages") expects role/content dicts, not the
    # (user, bot) tuples ai_response maintains.
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    return messages

def _chat_send(message, history, context, file_input=None):
    # Bridge ai_response's tuple history to the Chatbot's message format.
    # Fix: the original wired outputs=[chatbot, chatbot] (duplicate outputs)
    # and fed tuple history straight into a messages-type Chatbot.
    history, _ = ai_response(message, history, context, file_input)
    return _history_to_messages(history), history

def _advise(question, history, context):
    # Show only the latest reply as HTML; ai_response returns full history.
    history, _ = ai_response(question, history, context)
    return history[-1][1] if history else ""

# 🎨 Gradio Blocks
with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="violet")) as iface:
    gr.Markdown("<h1 style='text-align:center; color:#A855F7;'>🎓 EduAI — Smart Study Companion</h1>")

    # Single shared State so chat history persists between clicks.
    chat_state = gr.State([])

    with gr.Row():
        # 🌈 Left Sidebar
        with gr.Column(scale=1, min_width=250):
            gr.Markdown("### 🧭 **Main Menu**")
            with gr.Accordion("📚 Study Tutor", open=False):
                tutor_subj = gr.Radio(["Mathematics ➗", "Science 🧪", "ICT 💻", "English 📘"], label="Select Subject")

            with gr.Accordion("🧠 MCQ Generator", open=False):
                mcq_topic = gr.Textbox(label="Enter topic for quiz:")
                mcq_btn = gr.Button("Generate Quiz 🎯", elem_classes="primary")
                mcq_output = gr.HTML("<i>Quiz will appear here...</i>")

            with gr.Accordion("🗓 Study Planner", open=False):
                planner_choice = gr.Radio(["View Plan 📅", "Add Task ✏️", "Study Tips 💡"], label="Planner Options")

            with gr.Accordion("💡 Study Adviser", open=False):
                adviser_input = gr.Textbox(label="Ask for study advice")
                adviser_btn = gr.Button("Get Advice", elem_classes="primary")
                adviser_output = gr.HTML("<i>Advice will appear here...</i>")

            with gr.Accordion("⚙️ Settings", open=False):
                clear_btn = gr.Button("🧹 Clear Memory")

        # 💬 Main Chat
        with gr.Column(scale=3):
            context_display = gr.Markdown("📘 **General Mode.** Ask EduAI anything!")
            chatbot = gr.Chatbot(label="EduAI Chat", elem_id="chatbot", height=450, type="messages")
            user_input = gr.Textbox(label="Type your question:")
            send_btn = gr.Button("Send ✈️", elem_classes="primary")
            file_input = gr.File(label="📂 Upload optional file")

    # ⚙️ Event handlers
    tutor_subj.change(update_context, inputs=tutor_subj, outputs=context_display)
    planner_choice.change(update_context, inputs=planner_choice, outputs=context_display)
    send_btn.click(_chat_send, inputs=[user_input, chat_state, context_display, file_input],
                   outputs=[chatbot, chat_state])
    clear_btn.click(clear_memory, outputs=[chatbot, context_display])
    mcq_btn.click(generate_mcq, inputs=mcq_topic, outputs=mcq_output)
    # Fix: the original passed a bare None inside `inputs`, which is not a
    # valid Gradio component, and routed the full history into the HTML output.
    adviser_btn.click(_advise, inputs=[adviser_input, chat_state, context_display], outputs=adviser_output)

iface.launch()
 
2
  import gradio as gr
3
  import requests
4
  import json
5
+ import random
6
 
7
+ # Hugging Face API token
8
  HF_TOKEN = os.getenv("HF_TOKEN")
9
 
10
# =========================
# AI response via DeepSeek
# =========================
def ai_response(message, history=None):
    """Send *message* (plus up to 5 prior turns) to the DeepSeek chat API.

    Returns ``(history, reply)`` where *history* is the same list of
    ``(user, assistant)`` tuples with the new turn appended.  On any
    failure the reply is an "❌ Error ..." string instead of raising,
    so Gradio callbacks never crash.
    """
    if history is None:
        history = []

    conversation = [{"role": "system", "content": (
        "You are EduAI, a professional multilingual educational assistant. "
        "Explain clearly with examples, use step-by-step for math, and Markdown/LaTeX for equations."
    )}]

    # Replay only the last 5 turns to keep the prompt small.
    for past_user, past_bot in history[-5:]:
        conversation.append({"role": "user", "content": past_user})
        conversation.append({"role": "assistant", "content": past_bot})

    conversation.append({"role": "user", "content": message})

    try:
        response = requests.post(
            "https://api-inference.huggingface.co/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {HF_TOKEN}",
                "Content-Type": "application/json"
            },
            json={
                "model": "deepseek-ai/DeepSeek-V3.2-Exp:novita",
                "messages": conversation
            },
            timeout=60,  # fix: a stalled request would otherwise hang the UI forever
        )
        response.raise_for_status()  # fix: surface HTTP errors, not KeyError on "choices"
        data = response.json()
        reply = data["choices"][0]["message"]["content"]
        history.append((message, reply))
        return history, reply

    except Exception as e:  # UI boundary: feed the failure back into the chat
        history.append((message, f"❌ Error contacting model: {e}"))
        return history, history[-1][1]
47
+
48
# =========================
# MCQ Generator (AI-based)
# =========================
def generate_mcq(subject):
    """Ask the model for 3 MCQs about *subject* and render them as HTML.

    Returns an "❌ Error ..." string when the model's reply cannot be
    parsed as a JSON list of question dicts.
    """
    # Fix: the example JSON in the original prompt used single quotes,
    # which is invalid JSON — a model imitating it would always fail
    # json.loads() below.  The example now uses double quotes.
    prompt = (
        f"Create 3 multiple-choice questions with 4 options each and the correct answer for {subject}. "
        'Return as JSON like: [{"question":"...","options":[".."],"answer":".."}, ...]'
    )
    history, reply = ai_response(prompt)

    # Models often wrap JSON in ```json fences; strip them before parsing.
    cleaned = reply.strip()
    if cleaned.startswith("```"):
        cleaned = cleaned.strip("`").strip()
        if cleaned[:4].lower() == "json":
            cleaned = cleaned[4:].strip()
    try:
        mcqs = json.loads(cleaned)
    except (json.JSONDecodeError, TypeError):  # fix: was a bare `except:`
        return f"❌ Error generating MCQs: {reply}"
    if not isinstance(mcqs, list):
        return f"❌ Error generating MCQs: {reply}"

    quiz_html = "<div style='font-family:Poppins, sans-serif; color:#EEE;'>"
    for i, q in enumerate(mcqs, 1):
        quiz_html += "<div style='margin-bottom:15px; padding:12px; border-radius:10px; background:#2a2a2a;'>"
        quiz_html += f"<b style='color:#7C3AED;'>{i}. {q['question']}</b><br>"
        for opt in q['options']:
            quiz_html += f"<label style='display:block; margin:5px 0;'><input type='checkbox' value='{opt}' style='accent-color:#7C3AED;'> {opt}</label>"
        quiz_html += f"<details style='color:#fff;'><summary>Show Answer</summary>Answer: {q['answer']}</details>"
        quiz_html += "</div>"
    quiz_html += "</div>"
    return quiz_html
69
+
70
# =========================
# Study Adviser
# =========================
def study_adviser(question):
    """Return AI-generated study advice for *question* (reply text only)."""
    prompt = f"Give a professional study advice: {question}"
    _history, advice = ai_response(prompt)
    return advice
76
+
77
# =========================
# Study Planner
# =========================
# Module-level task list: shared by every session of this app instance.
planner_tasks = []

def _format_tasks():
    # Render the tasks as a Markdown bullet list.
    return "\n".join(f"- {t}" for t in planner_tasks)

def add_task(task):
    """Add *task* to the planner and return the formatted task list.

    Fix: blank or whitespace-only input is ignored instead of being
    appended as an empty "- " bullet.
    """
    task = (task or "").strip()
    if task:
        planner_tasks.append(task)
    return _format_tasks()

def view_plan():
    """Return the current task list, or a placeholder when empty."""
    if not planner_tasks:
        return "No tasks added yet."
    return _format_tasks()
90
+
91
# =========================
# Gradio UI
# =========================
custom_css = """
body {background:#121212; color:#EEE; font-family:Poppins, sans-serif;}
button.primary {background:#7C3AED; color:white; border-radius:8px; padding:5px 12px;}
button.primary:hover {background:#6D28D9;}
.gradio-container {background:#121212;}
"""

def _tutor_answer(question):
    # The Study Tutor box needs only the reply text, not the history pair.
    _history, reply = ai_response(question)
    return reply

def _chat_send(message, history):
    # Keep tuple history in a shared State; the messages-type Chatbot needs
    # role/content dicts, so convert before displaying.
    history, _reply = ai_response(message, history)
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    return history, messages

# NOTE(review): gr.themes.Dark is not among Gradio's documented themes
# (Base/Default/Glass/Monochrome/Soft) — confirm it exists in the pinned
# Gradio version, otherwise this raises at startup; swap for e.g.
# gr.themes.Monochrome() if so.
with gr.Blocks(css=custom_css, theme=gr.themes.Dark()) as iface:
    gr.Markdown("<h1 style='text-align:center; color:#7C3AED;'>🎓 EduAI — Smart Study Companion</h1>")

    # Fix: the original created brand-new gr.State(value=[]) objects inside
    # both `inputs` and `outputs` of each event, so chat history was never
    # shared or persisted between clicks.  One shared State fixes that.
    chat_state = gr.State([])

    with gr.Row():
        with gr.Column(scale=1, min_width=250):
            gr.Markdown("### 🧭 Main Menu")

            with gr.Accordion("📚 Study Tutor", open=False):
                subject_tutor = gr.Radio(["Math", "Science", "ICT", "English"], label="Select Subject")
                tutor_input = gr.Textbox(label="Ask your question:")
                tutor_output = gr.Textbox(label="Answer:")
                tutor_btn = gr.Button("Get Answer", elem_classes="primary")

            with gr.Accordion("🧠 MCQ Generator", open=False):
                mcq_subject = gr.Radio(["Math", "Science", "ICT", "English"], label="Select Subject")
                mcq_output = gr.HTML("<i>Click 'Generate Quiz' to start!</i>")
                mcq_btn = gr.Button("Generate Quiz", elem_classes="primary")

            with gr.Accordion("📋 Study Adviser", open=False):
                adviser_input = gr.Textbox(label="Ask for study advice:")
                adviser_output = gr.Textbox(label="Advice:")
                adviser_btn = gr.Button("Get Advice", elem_classes="primary")

            with gr.Accordion("🗓 Study Planner", open=False):
                task_input = gr.Textbox(label="Add a Task:")
                add_task_btn = gr.Button("Add Task", elem_classes="primary")
                view_plan_btn = gr.Button("View Plan", elem_classes="primary")
                planner_output = gr.Textbox(label="Planner Tasks:")

        with gr.Column(scale=4):
            chatbot_display = gr.Chatbot(label="💬 EduAI Chat", height=480, type="messages")
            user_input = gr.Textbox(label="Type your question:")
            send_btn = gr.Button("Send", elem_classes="primary")

    # =========================
    # Event Bindings
    # =========================
    # Study Tutor — return only the answer text to the Textbox.
    tutor_btn.click(_tutor_answer, inputs=[tutor_input], outputs=[tutor_output])

    # MCQ Generator
    mcq_btn.click(generate_mcq, inputs=[mcq_subject], outputs=[mcq_output])

    # Study Adviser
    adviser_btn.click(study_adviser, inputs=[adviser_input], outputs=[adviser_output])

    # Study Planner
    add_task_btn.click(add_task, inputs=[task_input], outputs=[planner_output])
    view_plan_btn.click(view_plan, outputs=[planner_output])

    # Chatbot — fix: tuple history went straight into a messages-type
    # Chatbot; route it through _chat_send's conversion instead.
    send_btn.click(_chat_send, inputs=[user_input, chat_state], outputs=[chat_state, chatbot_display])

iface.launch()