Chaitu2112 committed on
Commit
f3f182d
·
verified ·
1 Parent(s): ba2818e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -4
app.py CHANGED
@@ -1291,11 +1291,14 @@ def transcribe_video_bytes(video_bytes: bytes, whisper_model_name: str = "small"
1291
 
1292
  # generate MCQs from summary (reuse existing function if present)
1293
  def generate_mcqs_from_summary_local(summary: str, num_qs: int = 10, model: str = MODEL):
1294
- # Reuse the same approach as your Streamlit function generate_mcqs_from_summary
 
 
 
1295
  prompt = f"""
1296
  Generate {num_qs} distinct multiple-choice questions that cover the following summary.
1297
  For each question include:
1298
- - Exactly 4 labeled options A) B) C) D)
1299
  - A single-letter answer line like: Answer: <A/B/C/D>
1300
 
1301
  Use exactly this format; do not add extra commentary or code fences.
@@ -1310,9 +1313,13 @@ Answer: <A/B/C/D>
1310
  Summary:
1311
  {summary}
1312
  """
1313
- out = call_ollama(prompt, model=model, timeout=600)
1314
- if out.startswith("OLLAMA_ERROR"):
 
 
 
1315
  return [{"question": out, "options": [], "answer": ""}]
 
1316
  return parse_mcqs_freeform(out)
1317
 
1318
  # Endpoint: transcribe -> summarize (video)
 
1291
 
1292
  # generate MCQs from summary (reuse existing function if present)
1293
  def generate_mcqs_from_summary_local(summary: str, num_qs: int = 10, model: str = MODEL):
1294
+ """
1295
+ Generate MCQs from a text summary using the OpenRouter model
1296
+ via call_ollama().
1297
+ """
1298
  prompt = f"""
1299
  Generate {num_qs} distinct multiple-choice questions that cover the following summary.
1300
  For each question include:
1301
+ - Exactly 4 labeled options A) B) C) D)
1302
  - A single-letter answer line like: Answer: <A/B/C/D>
1303
 
1304
  Use exactly this format; do not add extra commentary or code fences.
 
1313
  Summary:
1314
  {summary}
1315
  """
1316
+ # 👇 OpenRouter call – no model/timeout args here
1317
+ out = call_ollama(prompt)
1318
+
1319
+ # Match the error format used in call_ollama()
1320
+ if out.startswith("LOCAL_MODEL_ERROR"):
1321
  return [{"question": out, "options": [], "answer": ""}]
1322
+
1323
  return parse_mcqs_freeform(out)
1324
 
1325
  # Endpoint: transcribe -> summarize (video)