studyOverflow committed on
Commit
cf6f941
·
verified ·
1 Parent(s): e4df8aa

fix: show 5-segment prompt (level_3) with segment headers

Browse files
Files changed (1) hide show
  1. app.py +16 -5
app.py CHANGED
@@ -67,12 +67,23 @@ TASK_BY_ID: dict[str, dict[str, Any]] = {t["task_id"]: t for t in TASKS}
67
 
68
 
69
  def _extract_prompt(task: dict[str, Any]) -> str:
 
 
 
 
 
 
70
  gp = task.get("generation_prompts") or {}
71
  prompts = gp.get("prompts") or {}
72
- for level in ("level_1", "level_2", "level_3"):
 
73
  val = prompts.get(level)
74
  if isinstance(val, list) and val:
75
- return val[0]
 
 
 
 
76
  if isinstance(val, str) and val:
77
  return val
78
  return "(no prompt found)"
@@ -367,9 +378,9 @@ with gr.Blocks(title="MBench-V Annotation", theme=gr.themes.Soft()) as demo:
367
  with gr.Column(scale=2):
368
  meta_md = gr.Markdown()
369
  prompt_tb = gr.Textbox(
370
- label="Generation prompt",
371
- lines=10,
372
- max_lines=20,
373
  interactive=False,
374
  )
375
  with gr.Column(scale=1):
 
67
 
68
 
69
  def _extract_prompt(task: dict[str, Any]) -> str:
70
+ """Return the 5-segment prompt (level_3), joined with segment headers.
71
+
72
+ All MBench-V models were actually fed the 5-segment (level_3) prompts at
73
+ inference time, so this is what annotators should see. If level_3 is
74
+ missing for some reason we fall back to the finest available level.
75
+ """
76
  gp = task.get("generation_prompts") or {}
77
  prompts = gp.get("prompts") or {}
78
+ # Prefer the canonical 5-segment split; otherwise fall back gracefully.
79
+ for level in ("level_3", "level_4", "level_2", "level_1"):
80
  val = prompts.get(level)
81
  if isinstance(val, list) and val:
82
+ n = len(val)
83
+ parts = []
84
+ for i, seg in enumerate(val, 1):
85
+ parts.append(f"— Segment {i}/{n} —\n{seg}")
86
+ return "\n\n".join(parts)
87
  if isinstance(val, str) and val:
88
  return val
89
  return "(no prompt found)"
 
378
  with gr.Column(scale=2):
379
  meta_md = gr.Markdown()
380
  prompt_tb = gr.Textbox(
381
+ label="Generation prompt (5 segments)",
382
+ lines=20,
383
+ max_lines=40,
384
  interactive=False,
385
  )
386
  with gr.Column(scale=1):