mikaelJ46 committed on
Commit
eb22e20
·
verified ·
1 Parent(s): 5eb99fd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +64 -63
app.py CHANGED
@@ -1,6 +1,6 @@
1
  # --------------------------------------------------------------
2
- # IGCSE/GCSE Language Platform – Multi-AI System (Z.ai + Cohere + MiniMax + Gemini)
3
- # Models: Z.ai GLM-4.6 (Primary) β†’ Cohere β†’ MiniMax β†’ Gemini (Fallbacks)
4
  # --------------------------------------------------------------
5
 
6
  import os
@@ -11,69 +11,70 @@ import PyPDF2
11
  import time
12
 
13
  # ---------- 1. Configure ALL AI Systems ----------
14
- # Z.ai (Primary)
15
  try:
16
- from huggingface_hub import InferenceClient
17
- zai_client = InferenceClient(
18
- provider="novita",
19
- api_key=os.environ.get("HF_TOKEN"),
20
- )
21
- print(" Z.ai GLM-4.6 initialized successfully")
22
  except Exception as e:
23
- print(f" Error initializing Z.ai: {e}")
24
- zai_client = None
25
 
26
  # Cohere (Secondary)
27
  try:
28
  import cohere
29
  cohere_client = cohere.Client(os.getenv("COHERE_API_KEY"))
30
- print(" Cohere initialized successfully")
31
  except Exception as e:
32
  print(f" Error initializing Cohere: {e}")
33
  cohere_client = None
34
 
35
- # MiniMax (Tertiary)
36
  try:
37
- minimax_client = InferenceClient(
 
38
  provider="novita",
39
  api_key=os.environ.get("HF_TOKEN"),
40
  )
41
- print(" MiniMax AI initialized successfully")
42
  except Exception as e:
43
- print(f" Error initializing MiniMax: {e}")
44
- minimax_client = None
45
 
46
- # Gemini (Final Fallback)
47
  try:
48
- import google.generativeai as genai
49
- genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
50
- gemini_model = genai.GenerativeModel('gemini-2.5')
51
- print(" Gemini AI initialized successfully")
 
52
  except Exception as e:
53
- print(f" Error initializing Gemini: {e}")
54
- gemini_model = None
55
 
56
  # ---------- 2. Unified AI Function with Smart Fallback ----------
57
  def ask_ai(prompt, temperature=0.7, max_retries=2):
58
  """
59
- Try models in order: Z.ai β†’ Cohere β†’ MiniMax β†’ Gemini
60
  Returns: (response_text, source_name)
61
  """
62
  last_error = None
63
 
64
- # Try Z.ai first (Primary)
65
- if zai_client:
66
  for attempt in range(max_retries):
67
  try:
68
- completion = zai_client.chat.completions.create(
69
- model="zai-org/GLM-4.6",
70
- messages=[{"role": "user", "content": prompt}],
71
- temperature=temperature
 
72
  )
73
- return completion.choices[0].message.content, "zai"
74
  except Exception as e:
75
  last_error = e
76
- print(f" Z.ai attempt {attempt+1} failed: {str(e)}")
77
  if attempt < max_retries - 1:
78
  time.sleep(1)
79
 
@@ -93,35 +94,34 @@ def ask_ai(prompt, temperature=0.7, max_retries=2):
93
  if attempt < max_retries - 1:
94
  time.sleep(1)
95
 
96
- # Try MiniMax (Tertiary)
97
- if minimax_client:
98
  for attempt in range(max_retries):
99
  try:
100
- completion = minimax_client.chat.completions.create(
101
- model="MiniMaxAI/MiniMax-M2",
102
  messages=[{"role": "user", "content": prompt}],
103
  temperature=temperature
104
  )
105
- return completion.choices[0].message.content, "minimax"
106
  except Exception as e:
107
  last_error = e
108
- print(f" MiniMax attempt {attempt+1} failed: {str(e)}")
109
  if attempt < max_retries - 1:
110
  time.sleep(1)
111
 
112
- # Try Gemini (Final Fallback)
113
- if gemini_model:
114
  try:
115
- response = gemini_model.generate_content(
116
- prompt,
117
- generation_config=genai.types.GenerationConfig(
118
- temperature=temperature,
119
- )
120
  )
121
- return response.text, "gemini"
122
  except Exception as e:
123
  last_error = e
124
- print(f" Gemini fallback failed: {str(e)}")
125
 
126
  # All failed
127
  error_msg = f" Error: All AI services failed. Last error: {str(last_error)}"
@@ -184,7 +184,7 @@ Use a friendly, supportive tone to help students learn effectively."""
184
  conversation += f"Student: {user_msg}\n"
185
  if bot_msg:
186
  # Remove source indicators from history
187
- clean_msg = bot_msg.replace("[Cohere] ", "").replace("[MiniMax] ", "").replace("[Gemini] ", "")
188
  conversation += f"Tutor: {clean_msg}\n"
189
 
190
  conversation += f"Student: {message}\nTutor:"
@@ -192,13 +192,13 @@ Use a friendly, supportive tone to help students learn effectively."""
192
 
193
  bot_response, source = ask_ai(full_prompt, temperature=0.7)
194
 
195
- # Add source indicator if not from Z.ai
196
  if source == "cohere":
197
- bot_response = f" {bot_response}"
 
 
198
  elif source == "minimax":
199
- bot_response = f" {bot_response}"
200
- elif source == "gemini":
201
- bot_response = f" {bot_response}"
202
  elif source == "error":
203
  pass # Error already formatted
204
 
@@ -224,8 +224,8 @@ Provide only the translation without explanations:
224
 
225
  response, source = ask_ai(prompt, temperature=0.3)
226
 
227
- # Add subtle source indicator
228
- if source in ["cohere", "minimax", "gemini"]:
229
  response = f"{response}\n\n_[Translated using {source.title()}]_"
230
 
231
  return response
@@ -246,7 +246,7 @@ def dictionary_lookup(word):
246
 
247
  response, source = ask_ai(prompt, temperature=0.3)
248
 
249
- if source in ["cohere", "minimax", "gemini"]:
250
  response = f"{response}\n\n_[Dictionary powered by {source.title()}]_"
251
 
252
  return response
@@ -315,7 +315,7 @@ Return JSON (no markdown):
315
  How to Improve:
316
  {fb['improvements']}"""
317
 
318
- if source in ["cohere", "minimax", "gemini"]:
319
  result += f"\n\n_[Graded by {source.title()}]_"
320
 
321
  return result
@@ -339,7 +339,7 @@ def upload_paper(title, subject, level, content, pdf_file):
339
  pdf_text = extract_text_from_pdf(pdf_file)
340
  if pdf_text and not pdf_text.startswith("Error"):
341
  pdf_content_storage[paper_id] = pdf_text
342
- content += f"\n\n[ PDF extracted: {len(pdf_text)} characters]"
343
 
344
  papers_storage.append({
345
  "id": paper_id,
@@ -356,7 +356,7 @@ def get_papers_list():
356
  if not papers_storage:
357
  return "No papers yet."
358
  return "\n".join(
359
- f"**{p['title']}** ({p['subject'].upper()} - {p['level']}) {'📄 PDF' if p.get('has_pdf') else '📝'}\n📅 {p['uploaded_at']}\n{p['content'][:120]}...\n{'─'*60}"
360
  for p in papers_storage
361
  )
362
 
@@ -374,15 +374,16 @@ def view_papers_student(subject, level):
374
  with gr.Blocks(theme=gr.themes.Soft(), title="IGCSE/GCSE Platform") as app:
375
  gr.Markdown("""
376
  # IGCSE/GCSE Language Learning Platform
377
- AI Tutor | Translator | Dictionary | Past Papers
 
378
  """)
379
 
380
  with gr.Tabs():
381
  # ───── STUDENT ─────
382
  with gr.Tab(" Student Portal"):
383
  with gr.Tabs():
384
- with gr.Tab(" AI Tutor"):
385
- gr.Markdown("### Chat with Your AI Tutor\n*Powered by Z.ai with automatic fallback*")
386
  with gr.Row():
387
  subj = gr.Radio(["French", "EFL"], label="Subject", value="French")
388
  lvl = gr.Radio(["IGCSE", "GCSE"], label="Level", value="IGCSE")
 
1
  # --------------------------------------------------------------
2
+ # IGCSE/GCSE Language Platform – Multi-AI System (Gemini + Cohere + Z.ai + MiniMax)
3
+ # Models: Gemini 2.5 (Primary) β†’ Cohere β†’ Z.ai β†’ MiniMax (Fallbacks)
4
  # --------------------------------------------------------------
5
 
6
  import os
 
11
  import time
12
 
13
  # ---------- 1. Configure ALL AI Systems ----------
14
+ # Gemini (Primary)
15
  try:
16
+ import google.generativeai as genai
17
+ genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
18
+ gemini_model = genai.GenerativeModel('gemini-2.5')
19
+ print(" Gemini AI initialized successfully (PRIMARY)")
 
 
20
  except Exception as e:
21
+ print(f" Error initializing Gemini: {e}")
22
+ gemini_model = None
23
 
24
  # Cohere (Secondary)
25
  try:
26
  import cohere
27
  cohere_client = cohere.Client(os.getenv("COHERE_API_KEY"))
28
+ print(" Cohere initialized successfully (SECONDARY)")
29
  except Exception as e:
30
  print(f" Error initializing Cohere: {e}")
31
  cohere_client = None
32
 
33
+ # Z.ai (Tertiary)
34
  try:
35
+ from huggingface_hub import InferenceClient
36
+ zai_client = InferenceClient(
37
  provider="novita",
38
  api_key=os.environ.get("HF_TOKEN"),
39
  )
40
+ print(" Z.ai GLM-4.6 initialized successfully (TERTIARY)")
41
  except Exception as e:
42
+ print(f" Error initializing Z.ai: {e}")
43
+ zai_client = None
44
 
45
+ # MiniMax (Final Fallback)
46
  try:
47
+ minimax_client = InferenceClient(
48
+ provider="novita",
49
+ api_key=os.environ.get("HF_TOKEN"),
50
+ )
51
+ print(" MiniMax AI initialized successfully (FINAL FALLBACK)")
52
  except Exception as e:
53
+ print(f" Error initializing MiniMax: {e}")
54
+ minimax_client = None
55
 
56
  # ---------- 2. Unified AI Function with Smart Fallback ----------
57
  def ask_ai(prompt, temperature=0.7, max_retries=2):
58
  """
59
+ Try models in order: Gemini β†’ Cohere β†’ Z.ai β†’ MiniMax
60
  Returns: (response_text, source_name)
61
  """
62
  last_error = None
63
 
64
+ # Try Gemini first (Primary)
65
+ if gemini_model:
66
  for attempt in range(max_retries):
67
  try:
68
+ response = gemini_model.generate_content(
69
+ prompt,
70
+ generation_config=genai.types.GenerationConfig(
71
+ temperature=temperature,
72
+ )
73
  )
74
+ return response.text, "gemini"
75
  except Exception as e:
76
  last_error = e
77
+ print(f" Gemini attempt {attempt+1} failed: {str(e)}")
78
  if attempt < max_retries - 1:
79
  time.sleep(1)
80
 
 
94
  if attempt < max_retries - 1:
95
  time.sleep(1)
96
 
97
+ # Try Z.ai (Tertiary)
98
+ if zai_client:
99
  for attempt in range(max_retries):
100
  try:
101
+ completion = zai_client.chat.completions.create(
102
+ model="zai-org/GLM-4.6",
103
  messages=[{"role": "user", "content": prompt}],
104
  temperature=temperature
105
  )
106
+ return completion.choices[0].message.content, "zai"
107
  except Exception as e:
108
  last_error = e
109
+ print(f" Z.ai attempt {attempt+1} failed: {str(e)}")
110
  if attempt < max_retries - 1:
111
  time.sleep(1)
112
 
113
+ # Try MiniMax (Final Fallback)
114
+ if minimax_client:
115
  try:
116
+ completion = minimax_client.chat.completions.create(
117
+ model="MiniMaxAI/MiniMax-M2",
118
+ messages=[{"role": "user", "content": prompt}],
119
+ temperature=temperature
 
120
  )
121
+ return completion.choices[0].message.content, "minimax"
122
  except Exception as e:
123
  last_error = e
124
+ print(f" MiniMax fallback failed: {str(e)}")
125
 
126
  # All failed
127
  error_msg = f" Error: All AI services failed. Last error: {str(last_error)}"
 
184
  conversation += f"Student: {user_msg}\n"
185
  if bot_msg:
186
  # Remove source indicators from history
187
+ clean_msg = bot_msg.replace("[Cohere] ", "").replace("[Z.ai] ", "").replace("[MiniMax] ", "")
188
  conversation += f"Tutor: {clean_msg}\n"
189
 
190
  conversation += f"Student: {message}\nTutor:"
 
192
 
193
  bot_response, source = ask_ai(full_prompt, temperature=0.7)
194
 
195
+ # Add source indicator if not from Gemini
196
  if source == "cohere":
197
+ bot_response = f"🔵 {bot_response}"
198
+ elif source == "zai":
199
+ bot_response = f"🟢 {bot_response}"
200
  elif source == "minimax":
201
+ bot_response = f"🟣 {bot_response}"
 
 
202
  elif source == "error":
203
  pass # Error already formatted
204
 
 
224
 
225
  response, source = ask_ai(prompt, temperature=0.3)
226
 
227
+ # Add subtle source indicator if not primary
228
+ if source in ["cohere", "zai", "minimax"]:
229
  response = f"{response}\n\n_[Translated using {source.title()}]_"
230
 
231
  return response
 
246
 
247
  response, source = ask_ai(prompt, temperature=0.3)
248
 
249
+ if source in ["cohere", "zai", "minimax"]:
250
  response = f"{response}\n\n_[Dictionary powered by {source.title()}]_"
251
 
252
  return response
 
315
  How to Improve:
316
  {fb['improvements']}"""
317
 
318
+ if source in ["cohere", "zai", "minimax"]:
319
  result += f"\n\n_[Graded by {source.title()}]_"
320
 
321
  return result
 
339
  pdf_text = extract_text_from_pdf(pdf_file)
340
  if pdf_text and not pdf_text.startswith("Error"):
341
  pdf_content_storage[paper_id] = pdf_text
342
+ content += f"\n\n[📄 PDF extracted: {len(pdf_text)} characters]"
343
 
344
  papers_storage.append({
345
  "id": paper_id,
 
356
  if not papers_storage:
357
  return "No papers yet."
358
  return "\n".join(
359
+ f"**{p['title']}** ({p['subject'].upper()} - {p['level']}) {' PDF' if p.get('has_pdf') else ''}\n {p['uploaded_at']}\n{p['content'][:120]}...\n{'─'*60}"
360
  for p in papers_storage
361
  )
362
 
 
374
  with gr.Blocks(theme=gr.themes.Soft(), title="IGCSE/GCSE Platform") as app:
375
  gr.Markdown("""
376
  # IGCSE/GCSE Language Learning Platform
377
+ Justice AI Tutor | Translator | Dictionary | Past Papers
378
+ _Powered by Gemini AI with intelligent fallback system_
379
  """)
380
 
381
  with gr.Tabs():
382
  # ───── STUDENT ─────
383
  with gr.Tab(" Student Portal"):
384
  with gr.Tabs():
385
+ with gr.Tab("Justice AI Tutor"):
386
+ gr.Markdown("### Chat with Your AI Tutor\n*Powered by Gemini 2.5 with automatic fallback*")
387
  with gr.Row():
388
  subj = gr.Radio(["French", "EFL"], label="Subject", value="French")
389
  lvl = gr.Radio(["IGCSE", "GCSE"], label="Level", value="IGCSE")