mikaelJ46 committed on
Commit
7420e28
·
verified ·
1 Parent(s): b607765

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +98 -53
app.py CHANGED
@@ -1,6 +1,6 @@
1
  # --------------------------------------------------------------
2
- # IGCSE/GCSE Language Platform – Multi-AI System (Gemini + Cohere + Z.ai + MiniMax)
3
- # Models: Gemini 2.5 (Primary) β†’ Cohere β†’ Z.ai β†’ MiniMax (Fallbacks)
4
  # --------------------------------------------------------------
5
 
6
  import os
@@ -11,39 +11,37 @@ import PyPDF2
11
  import time
12
 
13
  # ---------- 1. Configure ALL AI Systems ----------
14
- # Gemini (Primary)
 
 
 
 
 
 
 
 
 
15
  try:
16
  import google.generativeai as genai
17
  genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
18
  gemini_model = genai.GenerativeModel('gemini-2.5-pro')
19
- print("βœ… Gemini AI initialized successfully (PRIMARY)")
20
  except Exception as e:
21
  print(f"❌ Error initializing Gemini: {e}")
22
  gemini_model = None
23
 
24
- # Cohere (Secondary)
25
  try:
26
  import cohere
27
  cohere_client = cohere.Client(os.getenv("COHERE_API_KEY"))
28
- print("βœ… Cohere initialized successfully (SECONDARY)")
29
  except Exception as e:
30
  print(f"❌ Error initializing Cohere: {e}")
31
  cohere_client = None
32
 
33
- # Z.ai (Tertiary)
34
- try:
35
- from huggingface_hub import InferenceClient
36
- zai_client = InferenceClient(
37
- provider="novita",
38
- api_key=os.environ.get("HF_TOKEN"),
39
- )
40
- print("βœ… Z.ai GLM-4.6 initialized successfully (TERTIARY)")
41
- except Exception as e:
42
- print(f"❌ Error initializing Z.ai: {e}")
43
- zai_client = None
44
-
45
  # MiniMax (Final Fallback)
46
  try:
 
47
  minimax_client = InferenceClient(
48
  provider="novita",
49
  api_key=os.environ.get("HF_TOKEN"),
@@ -56,12 +54,28 @@ except Exception as e:
56
  # ---------- 2. Unified AI Function with Smart Fallback ----------
57
  def ask_ai(prompt, temperature=0.7, max_retries=2):
58
  """
59
- Try models in order: Gemini β†’ Cohere β†’ Z.ai β†’ MiniMax
60
  Returns: (response_text, source_name)
61
  """
62
  last_error = None
63
 
64
- # Try Gemini first (Primary)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
  if gemini_model:
66
  for attempt in range(max_retries):
67
  try:
@@ -78,7 +92,7 @@ def ask_ai(prompt, temperature=0.7, max_retries=2):
78
  if attempt < max_retries - 1:
79
  time.sleep(1)
80
 
81
- # Try Cohere (Secondary)
82
  if cohere_client:
83
  for attempt in range(max_retries):
84
  try:
@@ -94,22 +108,6 @@ def ask_ai(prompt, temperature=0.7, max_retries=2):
94
  if attempt < max_retries - 1:
95
  time.sleep(1)
96
 
97
- # Try Z.ai (Tertiary)
98
- if zai_client:
99
- for attempt in range(max_retries):
100
- try:
101
- completion = zai_client.chat.completions.create(
102
- model="zai-org/GLM-4.6",
103
- messages=[{"role": "user", "content": prompt}],
104
- temperature=temperature
105
- )
106
- return completion.choices[0].message.content, "zai"
107
- except Exception as e:
108
- last_error = e
109
- print(f"⚠ Z.ai attempt {attempt+1} failed: {str(e)}")
110
- if attempt < max_retries - 1:
111
- time.sleep(1)
112
-
113
  # Try MiniMax (Final Fallback)
114
  if minimax_client:
115
  try:
@@ -166,7 +164,7 @@ def extract_text_from_pdf(pdf_file):
166
  except Exception as e:
167
  return f"Error extracting PDF: {e}"
168
 
169
- # ---------- 6. AI Tutor with Multi-Model Support (FIXED) ----------
170
  def ai_tutor_chat(message, history, subject, topic, level):
171
  if not message.strip():
172
  return history
@@ -183,8 +181,8 @@ Use a friendly, supportive tone to help students learn effectively."""
183
  if user_msg:
184
  conversation += f"Student: {user_msg}\n"
185
  if bot_msg:
186
- # FIXED: Remove emoji indicators that match what we're actually adding
187
- clean_msg = bot_msg.replace("πŸ”΅ ", "").replace("🟒 ", "").replace("🟣 ", "")
188
  conversation += f"Tutor: {clean_msg}\n"
189
 
190
  conversation += f"Student: {message}\nTutor:"
@@ -192,11 +190,11 @@ Use a friendly, supportive tone to help students learn effectively."""
192
 
193
  bot_response, source = ask_ai(full_prompt, temperature=0.7)
194
 
195
- # Add source indicator if not from Gemini
196
- if source == "cohere":
197
  bot_response = f"πŸ”΅ {bot_response}"
198
- elif source == "zai":
199
- bot_response = f"🟒 {bot_response}"
200
  elif source == "minimax":
201
  bot_response = f"🟣 {bot_response}"
202
  elif source == "error":
@@ -225,7 +223,7 @@ Provide only the translation without explanations:
225
  response, source = ask_ai(prompt, temperature=0.3)
226
 
227
  # Add subtle source indicator if not primary
228
- if source in ["cohere", "zai", "minimax"]:
229
  response = f"{response}\n\n_[Translated using {source.title()}]_"
230
 
231
  return response
@@ -246,12 +244,59 @@ def dictionary_lookup(word):
246
 
247
  response, source = ask_ai(prompt, temperature=0.3)
248
 
249
- if source in ["cohere", "zai", "minimax"]:
250
  response = f"{response}\n\n_[Dictionary powered by {source.title()}]_"
251
 
252
  return response
253
 
254
- # ---------- 9. Practice Questions (Enhanced with PDF context) ----------
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
255
  def generate_question(subject, topic, level):
256
  if not topic:
257
  return "Select a topic!", "", ""
@@ -315,14 +360,14 @@ Return JSON (no markdown):
315
  πŸ“ˆ How to Improve:
316
  {fb['improvements']}"""
317
 
318
- if source in ["cohere", "zai", "minimax"]:
319
  result += f"\n\n_[Graded by {source.title()}]_"
320
 
321
  return result
322
  except Exception:
323
  return response
324
 
325
- # ---------- 10. Admin – Past Papers ----------
326
  def verify_admin_password(password):
327
  if password == ADMIN_PASSWORD:
328
  return gr.update(visible=True), gr.update(visible=False), "βœ… Access granted!"
@@ -370,12 +415,12 @@ def view_papers_student(subject, level):
370
  for p in filtered
371
  )
372
 
373
- # ---------- 11. Gradio UI ----------
374
  with gr.Blocks(theme=gr.themes.Soft(), title="IGCSE/GCSE Platform") as app:
375
  gr.Markdown("""
376
  # πŸŽ“ IGCSE/GCSE Language Learning Platform
377
  πŸ€– AI Tutor | 🌐 Translator | πŸ“– Dictionary | πŸ“š Past Papers
378
- _Powered by Gemini AI with intelligent fallback system_
379
  """)
380
 
381
  with gr.Tabs():
@@ -384,7 +429,7 @@ with gr.Blocks(theme=gr.themes.Soft(), title="IGCSE/GCSE Platform") as app:
384
  with gr.Tabs():
385
  # AI TUTOR
386
  with gr.Tab("πŸ€– AI Tutor"):
387
- gr.Markdown("### Chat with Your AI Tutor\n*Powered by Gemini 2.5 with automatic fallback*")
388
  with gr.Row():
389
  subj = gr.Radio(["French", "EFL"], label="Subject", value="French")
390
  lvl = gr.Radio(["IGCSE", "GCSE"], label="Level", value="IGCSE")
@@ -474,7 +519,7 @@ with gr.Blocks(theme=gr.themes.Soft(), title="IGCSE/GCSE Platform") as app:
474
 
475
  gr.Markdown("""
476
  ---
477
- **System Status:** 🟒 Gemini AI (Primary) | πŸ”΅ Cohere (Secondary) | 🟒 Z.ai (Tertiary) | 🟣 MiniMax (Fallback)
478
  """)
479
 
480
  app.launch()
 
1
  # --------------------------------------------------------------
2
+ # IGCSE/GCSE Language Platform – Multi-AI System (Z.ai + Gemini + Cohere + MiniMax)
3
+ # Models: Z.ai (Primary) β†’ Gemini β†’ Cohere β†’ MiniMax (Fallbacks)
4
  # --------------------------------------------------------------
5
 
6
  import os
 
11
  import time
12
 
13
  # ---------- 1. Configure ALL AI Systems ----------
14
# Z.ai (Primary) - Using Z.ai SDK
try:
    import zai
    # ZAI_API_KEY may be unset; any failure here (missing package, bad key)
    # falls through to the except so the app still starts.
    zai_client = zai.Client(api_key=os.getenv("ZAI_API_KEY"))
    print("✅ Z.ai SDK initialized successfully (PRIMARY)")
except Exception as e:
    # Broad catch is deliberate: init failure must not crash the app —
    # ask_ai() treats zai_client = None as "skip this provider".
    print(f"❌ Error initializing Z.ai SDK: {e}")
    zai_client = None
22
+
23
# Gemini (Secondary)
try:
    import google.generativeai as genai
    # GEMINI_API_KEY may be unset; configure/model construction raising is
    # handled below so the app still starts without this provider.
    genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
    gemini_model = genai.GenerativeModel('gemini-2.5-pro')
    print("✅ Gemini AI initialized successfully (SECONDARY)")
except Exception as e:
    # ask_ai() treats gemini_model = None as "skip this provider".
    print(f"❌ Error initializing Gemini: {e}")
    gemini_model = None
32
 
33
# Cohere (Tertiary)
try:
    import cohere
    # COHERE_API_KEY may be unset; Client construction raising is handled
    # below so the app still starts without this provider.
    cohere_client = cohere.Client(os.getenv("COHERE_API_KEY"))
    print("✅ Cohere initialized successfully (TERTIARY)")
except Exception as e:
    # ask_ai() treats cohere_client = None as "skip this provider".
    print(f"❌ Error initializing Cohere: {e}")
    cohere_client = None
41
 
 
 
 
 
 
 
 
 
 
 
 
 
42
  # MiniMax (Final Fallback)
43
  try:
44
+ from huggingface_hub import InferenceClient
45
  minimax_client = InferenceClient(
46
  provider="novita",
47
  api_key=os.environ.get("HF_TOKEN"),
 
54
  # ---------- 2. Unified AI Function with Smart Fallback ----------
55
  def ask_ai(prompt, temperature=0.7, max_retries=2):
56
  """
57
+ Try models in order: Z.ai β†’ Gemini β†’ Cohere β†’ MiniMax
58
  Returns: (response_text, source_name)
59
  """
60
  last_error = None
61
 
62
+ # Try Z.ai first (Primary) - Using Z.ai SDK
63
+ if zai_client:
64
+ for attempt in range(max_retries):
65
+ try:
66
+ response = zai_client.chat.completions.create(
67
+ model="glm-4.6", # Replace with actual model name
68
+ messages=[{"role": "user", "content": prompt}],
69
+ temperature=temperature
70
+ )
71
+ return response.choices[0].message.content, "zai"
72
+ except Exception as e:
73
+ last_error = e
74
+ print(f"⚠ Z.ai attempt {attempt+1} failed: {str(e)}")
75
+ if attempt < max_retries - 1:
76
+ time.sleep(1)
77
+
78
+ # Try Gemini (Secondary)
79
  if gemini_model:
80
  for attempt in range(max_retries):
81
  try:
 
92
  if attempt < max_retries - 1:
93
  time.sleep(1)
94
 
95
+ # Try Cohere (Tertiary)
96
  if cohere_client:
97
  for attempt in range(max_retries):
98
  try:
 
108
  if attempt < max_retries - 1:
109
  time.sleep(1)
110
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
111
  # Try MiniMax (Final Fallback)
112
  if minimax_client:
113
  try:
 
164
  except Exception as e:
165
  return f"Error extracting PDF: {e}"
166
 
167
+ # ---------- 6. AI Tutor with Multi-Model Support ----------
168
  def ai_tutor_chat(message, history, subject, topic, level):
169
  if not message.strip():
170
  return history
 
181
  if user_msg:
182
  conversation += f"Student: {user_msg}\n"
183
  if bot_msg:
184
+ # Remove emoji indicators that match what we're actually adding
185
+ clean_msg = bot_msg.replace("πŸ”΅ ", "").replace("🟒 ", "").replace("🟣 ", "").replace("🟠 ", "")
186
  conversation += f"Tutor: {clean_msg}\n"
187
 
188
  conversation += f"Student: {message}\nTutor:"
 
190
 
191
  bot_response, source = ask_ai(full_prompt, temperature=0.7)
192
 
193
+ # Add source indicator if not from Z.ai
194
+ if source == "gemini":
195
  bot_response = f"πŸ”΅ {bot_response}"
196
+ elif source == "cohere":
197
+ bot_response = f"🟠 {bot_response}"
198
  elif source == "minimax":
199
  bot_response = f"🟣 {bot_response}"
200
  elif source == "error":
 
223
  response, source = ask_ai(prompt, temperature=0.3)
224
 
225
  # Add subtle source indicator if not primary
226
+ if source in ["gemini", "cohere", "minimax"]:
227
  response = f"{response}\n\n_[Translated using {source.title()}]_"
228
 
229
  return response
 
244
 
245
  response, source = ask_ai(prompt, temperature=0.3)
246
 
247
+ if source in ["gemini", "cohere", "minimax"]:
248
  response = f"{response}\n\n_[Dictionary powered by {source.title()}]_"
249
 
250
  return response
251
 
252
+ # ---------- 9. Search Past Papers for Real Questions ----------
253
def search_past_papers(subject, topic, level):
    """Search uploaded past papers for real exam questions on a topic.

    Args:
        subject: Subject name; matched case-insensitively against stored papers.
        topic:   Topic to search for. Empty/None short-circuits with a warning.
        level:   Exam level (e.g. "IGCSE"/"GCSE"); compared exactly.

    Returns:
        A user-facing string: a warning when no topic is given, a "no papers"
        notice when nothing matches, or the AI-extracted question list (with a
        source credit appended when a fallback model answered).
    """
    if not topic:
        return "⚠ Select a topic first!"

    # Index papers by id once instead of a linear next() scan per stored PDF
    # (was O(papers * pdfs)).
    papers_by_id = {p['id']: p for p in papers_storage}

    # Collect content of papers matching this subject/level.
    matching_content = []
    for paper_id, content in pdf_content_storage.items():
        paper = papers_by_id.get(paper_id)
        if paper and paper['subject'].lower() == subject.lower() and paper['level'] == level:
            matching_content.append({
                'title': paper['title'],
                'content': content,
                'uploaded': paper['uploaded_at']
            })

    if not matching_content:
        return f"📭 No past papers found for {subject} {level}.\n\nTip: Upload past papers in the Admin Panel to enable this feature."

    # Cap each paper at 5000 chars to keep the combined prompt within model limits.
    combined_content = "\n\n".join(
        f"=== {p['title']} ===\n{p['content'][:5000]}" for p in matching_content
    )

    prompt = f"""You are analyzing real {level} {subject} past papers to find questions about "{topic}".

PAST PAPER CONTENT:
{combined_content}

TASK: Extract and return ALL questions from these papers that relate to the topic "{topic}".

For each question found, provide:
1. The complete question text (exactly as written)
2. The paper it came from
3. Any mark allocations mentioned
4. Any accompanying resources/images mentioned

Format your response clearly with question numbers and paper sources.
If no questions directly match this topic, return questions from related topics and explain the connection.
If no relevant questions exist at all, clearly state this."""

    # Low temperature: extraction should be faithful, not creative.
    response, source = ask_ai(prompt, temperature=0.3)

    # Credit the fallback model when the primary (Z.ai) did not answer.
    if source in ["gemini", "cohere", "minimax"]:
        response = f"{response}\n\n_[Search powered by {source.title()}]_"

    return response
298
+
299
+ # ---------- 10. Practice Questions (Enhanced with PDF context) ----------
300
  def generate_question(subject, topic, level):
301
  if not topic:
302
  return "Select a topic!", "", ""
 
360
  πŸ“ˆ How to Improve:
361
  {fb['improvements']}"""
362
 
363
+ if source in ["gemini", "cohere", "minimax"]:
364
  result += f"\n\n_[Graded by {source.title()}]_"
365
 
366
  return result
367
  except Exception:
368
  return response
369
 
370
+ # ---------- 11. Admin – Past Papers ----------
371
  def verify_admin_password(password):
372
  if password == ADMIN_PASSWORD:
373
  return gr.update(visible=True), gr.update(visible=False), "βœ… Access granted!"
 
415
  for p in filtered
416
  )
417
 
418
+ # ---------- 12. Gradio UI ----------
419
  with gr.Blocks(theme=gr.themes.Soft(), title="IGCSE/GCSE Platform") as app:
420
  gr.Markdown("""
421
  # πŸŽ“ IGCSE/GCSE Language Learning Platform
422
  πŸ€– AI Tutor | 🌐 Translator | πŸ“– Dictionary | πŸ“š Past Papers
423
+ _Powered by Z.ai with intelligent multi-model fallback system_
424
  """)
425
 
426
  with gr.Tabs():
 
429
  with gr.Tabs():
430
  # AI TUTOR
431
  with gr.Tab("πŸ€– AI Tutor"):
432
+ gr.Markdown("### Chat with Your AI Tutor\n*Powered by Z.ai with automatic fallback*")
433
  with gr.Row():
434
  subj = gr.Radio(["French", "EFL"], label="Subject", value="French")
435
  lvl = gr.Radio(["IGCSE", "GCSE"], label="Level", value="IGCSE")
 
519
 
520
  gr.Markdown("""
521
  ---
522
+ **System Status:** 🟒 Z.ai (Primary) | πŸ”΅ Gemini (Secondary) | 🟠 Cohere (Tertiary) | 🟣 MiniMax (Fallback)
523
  """)
524
 
525
  app.launch()