Hugging Face Space (status: Sleeping)
Commit: "Update app.py" — Browse files
File changed: app.py
|
@@ -6,7 +6,7 @@ import json
|
|
| 6 |
HF_TOKEN = os.getenv("HF_TOKEN")
|
| 7 |
MODEL_URL = "https://api-inference.huggingface.co/models/deepseek-ai/DeepSeek-V3.2-Exp:novita"
|
| 8 |
|
| 9 |
-
#
|
| 10 |
def ai_response(message, history, context, file_input=None):
|
| 11 |
if not isinstance(history, list):
|
| 12 |
history = []
|
|
@@ -16,9 +16,9 @@ def ai_response(message, history, context, file_input=None):
|
|
| 16 |
|
| 17 |
conversation = [
|
| 18 |
{"role": "system", "content": (
|
| 19 |
-
"You are EduAI, a
|
| 20 |
-
"Explain
|
| 21 |
-
"
|
| 22 |
)}
|
| 23 |
]
|
| 24 |
for past_user, past_bot in history[-5:]:
|
|
@@ -43,7 +43,7 @@ def ai_response(message, history, context, file_input=None):
|
|
| 43 |
history.append((message, f"❌ Error contacting model: {e}"))
|
| 44 |
return history, history
|
| 45 |
|
| 46 |
-
#
|
| 47 |
MEMORY_FILE = "memory.json"
|
| 48 |
def clear_memory():
|
| 49 |
if os.path.exists(MEMORY_FILE):
|
|
@@ -58,7 +58,7 @@ def update_context(choice):
|
|
| 58 |
|
| 59 |
# 🧩 Dynamic MCQ generator via AI
|
| 60 |
def generate_mcq(topic):
|
| 61 |
-
prompt = f"Create a short 3-question MCQ quiz about {topic} with 4 options each
|
| 62 |
try:
|
| 63 |
response = requests.post(
|
| 64 |
MODEL_URL,
|
|
@@ -107,15 +107,15 @@ with gr.Blocks(css=custom_css) as iface:
|
|
| 107 |
|
| 108 |
with gr.Column(scale=4):
|
| 109 |
context_display = gr.Markdown("📘 **General Mode**")
|
| 110 |
-
chatbot = gr.Chatbot(elem_id="chatbot", height=480)
|
| 111 |
user_input = gr.Textbox(label="Type your message")
|
| 112 |
send_btn = gr.Button("Send ✈️", elem_classes="primary")
|
| 113 |
|
| 114 |
# Event bindings
|
| 115 |
-
send_btn.click(ai_response, inputs=[user_input, chatbot, context_display,
|
| 116 |
clear_btn.click(clear_memory, outputs=[chatbot, context_display])
|
| 117 |
gen_mcq_btn.click(generate_mcq, inputs=[mcq_topic], outputs=mcq_output)
|
| 118 |
-
advisor_btn.click(ai_response, inputs=[advisor_txt, chatbot, context_display,
|
| 119 |
tutor_sub.change(update_context, inputs=tutor_sub, outputs=context_display)
|
| 120 |
planner_opt.change(update_context, inputs=planner_opt, outputs=context_display)
|
| 121 |
|
|
|
|
| 6 |
HF_TOKEN = os.getenv("HF_TOKEN")
|
| 7 |
MODEL_URL = "https://api-inference.huggingface.co/models/deepseek-ai/DeepSeek-V3.2-Exp:novita"
|
| 8 |
|
| 9 |
+
# 🧠 AI Response function
|
| 10 |
def ai_response(message, history, context, file_input=None):
|
| 11 |
if not isinstance(history, list):
|
| 12 |
history = []
|
|
|
|
| 16 |
|
| 17 |
conversation = [
|
| 18 |
{"role": "system", "content": (
|
| 19 |
+
"You are EduAI, a professional educational AI assistant. "
|
| 20 |
+
"Explain clearly and step-by-step. Generate MCQs dynamically when requested. "
|
| 21 |
+
"Use Markdown and LaTeX for math."
|
| 22 |
)}
|
| 23 |
]
|
| 24 |
for past_user, past_bot in history[-5:]:
|
|
|
|
| 43 |
history.append((message, f"❌ Error contacting model: {e}"))
|
| 44 |
return history, history
|
| 45 |
|
| 46 |
+
# 🧹 Clear Chat Memory
|
| 47 |
MEMORY_FILE = "memory.json"
|
| 48 |
def clear_memory():
|
| 49 |
if os.path.exists(MEMORY_FILE):
|
|
|
|
| 58 |
|
| 59 |
# 🧩 Dynamic MCQ generator via AI
|
| 60 |
def generate_mcq(topic):
|
| 61 |
+
prompt = f"Create a short 3-question MCQ quiz about {topic} with 4 options each and indicate the correct answer."
|
| 62 |
try:
|
| 63 |
response = requests.post(
|
| 64 |
MODEL_URL,
|
|
|
|
| 107 |
|
| 108 |
with gr.Column(scale=4):
|
| 109 |
context_display = gr.Markdown("📘 **General Mode**")
|
| 110 |
+
chatbot = gr.Chatbot(label="💬 EduAI Chat", elem_id="chatbot", height=480, type="messages")
|
| 111 |
user_input = gr.Textbox(label="Type your message")
|
| 112 |
send_btn = gr.Button("Send ✈️", elem_classes="primary")
|
| 113 |
|
| 114 |
# Event bindings
|
| 115 |
+
send_btn.click(ai_response, inputs=[user_input, chatbot, context_display, gr.File(visible=False)], outputs=[chatbot, chatbot])
|
| 116 |
clear_btn.click(clear_memory, outputs=[chatbot, context_display])
|
| 117 |
gen_mcq_btn.click(generate_mcq, inputs=[mcq_topic], outputs=mcq_output)
|
| 118 |
+
advisor_btn.click(ai_response, inputs=[advisor_txt, chatbot, context_display, gr.File(visible=False)], outputs=[chatbot, chatbot])
|
| 119 |
tutor_sub.change(update_context, inputs=tutor_sub, outputs=context_display)
|
| 120 |
planner_opt.change(update_context, inputs=planner_opt, outputs=context_display)
|
| 121 |
|