Wfafa committed on
Commit
2935bc3
Β·
verified Β·
1 Parent(s): 938a41c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +86 -65
app.py CHANGED
@@ -3,120 +3,141 @@ import gradio as gr
3
  import requests
4
  import json
5
 
 
6
  HF_TOKEN = os.getenv("HF_TOKEN")
7
- MODEL_URL = "https://api-inference.huggingface.co/models/deepseek-ai/DeepSeek-V3.2-Exp:novita"
8
 
9
- # 🧠 AI Response function
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
  def ai_response(message, history, context, file_input=None):
11
  if not isinstance(history, list):
12
  history = []
13
 
14
  if file_input:
15
- message += f"\n\nπŸ“Ž (User uploaded a file named '{file_input.name}')"
16
 
17
  conversation = [
18
- {"role": "system", "content": (
19
- "You are EduAI, a professional educational AI assistant. "
20
- "Explain clearly and step-by-step. Generate MCQs dynamically when requested. "
21
- "Use Markdown and LaTeX for math."
22
  )}
23
  ]
 
24
  for past_user, past_bot in history[-5:]:
25
- conversation.append({"role": "user", "content": past_user})
26
- conversation.append({"role": "assistant", "content": past_bot})
27
- conversation.append({"role": "user", "content": message})
28
 
29
  try:
30
  response = requests.post(
31
- MODEL_URL,
32
- headers={"Authorization": f"Bearer {HF_TOKEN}", "Content-Type": "application/json"},
33
- json={"inputs": conversation, "parameters":{"max_new_tokens":512}}
34
  )
35
  data = response.json()
36
- if isinstance(data, dict) and "error" in data:
37
- reply = "❌ Error contacting model: " + data["error"]
38
- else:
39
- reply = data[0]["generated_text"] if isinstance(data, list) else str(data)
40
  history.append((message, reply))
 
41
  return history, history
42
  except Exception as e:
43
  history.append((message, f"❌ Error contacting model: {e}"))
44
  return history, history
45
 
46
- # 🧹 Clear Chat Memory
47
- MEMORY_FILE = "memory.json"
48
- def clear_memory():
49
- if os.path.exists(MEMORY_FILE):
50
- os.remove(MEMORY_FILE)
51
- return [], "🧹 Chat memory cleared! Start fresh."
52
-
53
- # πŸ“˜ Sidebar context
54
- def update_context(choice):
55
- if not choice:
56
- return "πŸ“˜ **General Mode.** Ask EduAI anything!"
57
- return f"πŸ“˜ **You selected {choice}.**"
58
-
59
- # 🧩 Dynamic MCQ generator via AI
60
  def generate_mcq(topic):
61
- prompt = f"Create a short 3-question MCQ quiz about {topic} with 4 options each and indicate the correct answer."
62
  try:
63
  response = requests.post(
64
- MODEL_URL,
65
- headers={"Authorization": f"Bearer {HF_TOKEN}", "Content-Type": "application/json"},
66
- json={"inputs": prompt, "parameters":{"max_new_tokens":400}}
67
  )
68
  data = response.json()
69
- text = data[0]["generated_text"] if isinstance(data, list) else str(data)
70
- return f"<div style='color:white'>{text.replace(chr(10), '<br>')}</div>"
 
 
 
 
 
71
  except Exception as e:
72
  return f"❌ Error generating MCQs: {e}"
73
 
74
- # 🎨 Gradio UI
 
 
 
 
 
 
 
 
 
 
 
 
75
  custom_css = """
76
- body {background-color:#1e1e2f; color:white; font-family:'Poppins', sans-serif;}
77
- #chatbot {background: rgba(30,30,50,0.85); border-radius:12px; backdrop-filter: blur(8px);}
78
  button.primary {background: linear-gradient(90deg,#7c3aed,#a855f7); color:white; border:none; border-radius:8px;}
79
  button.primary:hover {background: linear-gradient(90deg,#6d28d9,#9333ea);}
 
80
  """
81
 
82
- with gr.Blocks(css=custom_css) as iface:
83
- gr.Markdown("<h1 style='text-align:center; color:#a855f7'>πŸŽ“ EduAI β€” Smart Study Companion</h1>")
 
84
 
85
  with gr.Row():
86
- with gr.Column(scale=1, min_width=220):
87
- gr.Markdown("### 🧭 Main Menu")
88
-
89
  with gr.Accordion("πŸ“š Study Tutor", open=False):
90
- tutor_sub = gr.Radio(["Math", "Science", "ICT", "English"], label="Select Subject")
91
 
92
  with gr.Accordion("🧠 MCQ Generator", open=False):
93
- mcq_topic = gr.Textbox(label="Topic for Quiz")
94
- gen_mcq_btn = gr.Button("Generate MCQ", elem_classes="primary")
95
- mcq_output = gr.HTML("<i>Enter a topic and click 'Generate MCQ'</i>")
96
 
97
  with gr.Accordion("πŸ—“ Study Planner", open=False):
98
- planner_opt = gr.Radio(["View Plan", "Add Task", "Study Tips"], label="Planner Options")
99
 
100
- with gr.Accordion("πŸ“– Study Advisor", open=False):
101
- advisor_txt = gr.Textbox(label="Ask for Study Advice")
102
- advisor_btn = gr.Button("Get Advice", elem_classes="primary")
103
- advisor_output = gr.HTML("<i>Ask your study question here</i>")
104
 
105
  with gr.Accordion("βš™οΈ Settings", open=False):
106
- clear_btn = gr.Button("🧹 Clear Chat Memory")
107
 
108
- with gr.Column(scale=4):
109
- context_display = gr.Markdown("πŸ“˜ **General Mode**")
110
- chatbot = gr.Chatbot(label="πŸ’¬ EduAI Chat", elem_id="chatbot", height=480, type="messages")
111
- user_input = gr.Textbox(label="Type your message")
 
112
  send_btn = gr.Button("Send ✈️", elem_classes="primary")
 
113
 
114
- # Event bindings
115
- send_btn.click(ai_response, inputs=[user_input, chatbot, context_display, gr.File(visible=False)], outputs=[chatbot, chatbot])
 
 
116
  clear_btn.click(clear_memory, outputs=[chatbot, context_display])
117
- gen_mcq_btn.click(generate_mcq, inputs=[mcq_topic], outputs=mcq_output)
118
- advisor_btn.click(ai_response, inputs=[advisor_txt, chatbot, context_display, gr.File(visible=False)], outputs=[chatbot, chatbot])
119
- tutor_sub.change(update_context, inputs=tutor_sub, outputs=context_display)
120
- planner_opt.change(update_context, inputs=planner_opt, outputs=context_display)
121
 
122
  iface.launch()
 
3
  import requests
4
  import json
5
 
6
+ # 🌍 Hugging Face DeepSeek Model Token
7
  HF_TOKEN = os.getenv("HF_TOKEN")
 
8
 
9
+ # 🧠 Memory
10
+ MEMORY_FILE = "memory.json"
11
+ def load_memory():
12
+ if os.path.exists(MEMORY_FILE):
13
+ with open(MEMORY_FILE, "r") as f:
14
+ return json.load(f)
15
+ return []
16
+
17
+ def save_memory(memory):
18
+ with open(MEMORY_FILE, "w") as f:
19
+ json.dump(memory, f)
20
+
21
+ memory = load_memory()
22
+
23
# πŸ’¬ Chat function with Hugging Face
def ai_response(message, history, context, file_input=None):
    """Send `message` plus recent history to the chat model.

    Returns the updated history twice (once for the chatbot display, once
    for state). History is kept as (user, assistant) pairs; entries that
    arrive as openai-style {"role": ..., "content": ...} dicts (e.g. from a
    Chatbot configured with type="messages") are normalized into pairs first
    so the context loop below never mis-unpacks a dict.
    """
    if not isinstance(history, list):
        history = []

    # Normalize message-dict history into (user, assistant) pairs.
    if history and isinstance(history[0], dict):
        pairs, pending_user = [], None
        for entry in history:
            role = entry.get("role")
            if role == "user":
                pending_user = entry.get("content", "")
            elif role == "assistant" and pending_user is not None:
                pairs.append((pending_user, entry.get("content", "")))
                pending_user = None
        history = pairs

    if file_input:
        message += f"\n\nπŸ“Ž (User uploaded file '{file_input.name}')"

    conversation = [
        {"role": "system", "content": (
            "You are EduAI, a multilingual educational AI assistant. "
            "Explain answers like a professional tutor, use Markdown/LaTeX for math, "
            "and keep answers neat and student-friendly."
        )}
    ]

    # Only the last 5 exchanges are sent, to bound prompt size.
    for past_user, past_bot in history[-5:]:
        conversation.append({"role": "user", "content": past_user})
        conversation.append({"role": "assistant", "content": past_bot})
    conversation.append({"role": "user", "content": message})

    try:
        response = requests.post(
            "https://router.huggingface.co/v1/chat/completions",
            headers={"Authorization": f"Bearer {HF_TOKEN}", "Content-Type": "application/json"},
            json={"model": "deepseek-ai/DeepSeek-V3.2-Exp:novita", "messages": conversation},
            # Previously unbounded: a stalled request froze the UI forever.
            timeout=60,
        )
        # Surface HTTP errors explicitly instead of a confusing KeyError below.
        response.raise_for_status()
        data = response.json()
        reply = data["choices"][0]["message"]["content"]
        history.append((message, reply))
        save_memory(history)
        return history, history
    except Exception as e:
        # Best-effort error surface in the chat itself; never crash the UI.
        history.append((message, f"❌ Error contacting model: {e}"))
        return history, history
58
 
59
# 🧠 MCQ generator (dynamic)
def generate_mcq(topic):
    """Ask the model for a 3-question MCQ quiz about `topic`.

    Returns an HTML snippet on success, or a plain error string on failure.
    """
    import html  # local import: used only for output escaping here

    prompt = f"Create a 3-question multiple-choice quiz about '{topic}' with 4 options each and indicate the correct answer."
    try:
        response = requests.post(
            "https://router.huggingface.co/v1/chat/completions",
            headers={"Authorization": f"Bearer {HF_TOKEN}", "Content-Type": "application/json"},
            json={"model": "deepseek-ai/DeepSeek-V3.2-Exp:novita", "messages": [{"role": "user", "content": prompt}]},
            # Avoid hanging the UI on a stalled request.
            timeout=60,
        )
        response.raise_for_status()
        data = response.json()
        reply = data["choices"][0]["message"]["content"]
        # Escape the model output before embedding it in HTML (it is untrusted
        # text), then join lines with <br> instead of quadratic +=.
        body = "<br>".join(html.escape(line) for line in reply.split("\n"))
        return (
            "<div style='color:white; font-family:Poppins, sans-serif;'>"
            + body
            + "<br></div>"
        )
    except Exception as e:
        return f"❌ Error generating MCQs: {e}"
78
 
79
# πŸ“˜ Sidebar context
def update_context(choice):
    """Return the Markdown banner for the currently selected sidebar mode."""
    if choice:
        return f"πŸ“˜ **You selected {choice} mode.** Ask anything related to this topic!"
    return "πŸ“˜ **You are in General Mode.** Ask EduAI anything about your studies!"
84
+
85
# 🧹 Clear chat
def clear_memory():
    """Delete the persisted memory file (if any) and reset the chat UI.

    Returns an empty history for the chatbot plus a fresh status banner.
    Uses EAFP removal instead of exists()+remove() to avoid the race where
    the file disappears between the check and the delete.
    """
    try:
        os.remove(MEMORY_FILE)
    except FileNotFoundError:
        pass
    return [], "🧹 Chat memory cleared! Start fresh."
90
+
91
# 🎨 UI Custom CSS — dark violet theme. `#chatbot` targets the Chatbot via
# its elem_id; `button.primary` matches buttons created with
# elem_classes="primary" below.
custom_css = """
.gradio-container {background: #1e1e2f; font-family:'Poppins', sans-serif; color:white;}
#chatbot {background: rgba(30,30,47,0.8); border-radius: 16px; backdrop-filter: blur(10px);}
button.primary {background: linear-gradient(90deg,#7c3aed,#a855f7); color:white; border:none; border-radius:8px;}
button.primary:hover {background: linear-gradient(90deg,#6d28d9,#9333ea);}
input[type=checkbox] {accent-color:#a855f7;}
"""
99
 
100
# 🎨 Gradio Blocks — sidebar of tools on the left, chat on the right.
with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="violet")) as iface:
    gr.Markdown("<h1 style='text-align:center; color:#A855F7;'>πŸŽ“ EduAI β€” Smart Study Companion</h1>")

    with gr.Row():
        # 🌈 Left Sidebar
        with gr.Column(scale=1, min_width=250):
            gr.Markdown("### 🧭 **Main Menu**")
            with gr.Accordion("πŸ“š Study Tutor", open=False):
                tutor_subj = gr.Radio(["Mathematics βž—","Science πŸ§ͺ","ICT πŸ’»","English πŸ“˜"], label="Select Subject")

            with gr.Accordion("🧠 MCQ Generator", open=False):
                mcq_topic = gr.Textbox(label="Enter topic for quiz:")
                mcq_btn = gr.Button("Generate Quiz 🎯", elem_classes="primary")
                mcq_output = gr.HTML("<i>Quiz will appear here...</i>")

            with gr.Accordion("πŸ—“ Study Planner", open=False):
                planner_choice = gr.Radio(["View Plan πŸ“…","Add Task ✏️","Study Tips πŸ’‘"], label="Planner Options")

            with gr.Accordion("πŸ’‘ Study Adviser", open=False):
                adviser_input = gr.Textbox(label="Ask for study advice")
                adviser_btn = gr.Button("Get Advice", elem_classes="primary")
                adviser_output = gr.HTML("<i>Advice will appear here...</i>")

            with gr.Accordion("βš™οΈ Settings", open=False):
                clear_btn = gr.Button("🧹 Clear Memory")

        # πŸ’¬ Main Chat
        with gr.Column(scale=3):
            context_display = gr.Markdown("πŸ“˜ **General Mode.** Ask EduAI anything!")
            # Fix: ai_response builds and returns (user, bot) tuple pairs, so the
            # Chatbot must use the default tuple format; type="messages" rejects
            # tuple history at render time.
            chatbot = gr.Chatbot(label="EduAI Chat", elem_id="chatbot", height=450)
            user_input = gr.Textbox(label="Type your question:")
            send_btn = gr.Button("Send ✈️", elem_classes="primary")
            file_input = gr.File(label="πŸ“‚ Upload optional file")

    # βš™οΈ Event handlers
    tutor_subj.change(update_context, inputs=tutor_subj, outputs=context_display)
    planner_choice.change(update_context, inputs=planner_choice, outputs=context_display)
    send_btn.click(ai_response, inputs=[user_input, chatbot, context_display, file_input], outputs=[chatbot, chatbot])
    clear_btn.click(clear_memory, outputs=[chatbot, context_display])
    mcq_btn.click(generate_mcq, inputs=mcq_topic, outputs=mcq_output)
    # Fix: the original passed `None` inside `inputs`, which is not a valid
    # Gradio input component; ai_response's file_input already defaults to
    # None when omitted. Advice is routed into the chat, since ai_response
    # returns chat history (a list of pairs), not HTML for adviser_output.
    adviser_btn.click(ai_response, inputs=[adviser_input, chatbot, context_display], outputs=[chatbot, chatbot])

iface.launch()