OliverPerrin committed on
Commit
dc22a7b
·
1 Parent(s): 6dd5c4b

Fix: Remove DownloadButton to avoid Gradio schema bug

Browse files
Files changed (1) hide show
  1. scripts/demo_gradio.py +9 -18
scripts/demo_gradio.py CHANGED
@@ -124,14 +124,12 @@ def count_tokens(text: str) -> str:
124
 
125
 
126
  def predict(text: str):
127
- hidden_download = gr.update(value=None, visible=False)
128
  if not text or not text.strip():
129
  return (
130
  "Please enter text to analyze.",
131
  None,
132
  "No topic prediction available.",
133
  None,
134
- hidden_download,
135
  )
136
 
137
  try:
@@ -168,21 +166,11 @@ def predict(text: str):
168
  "Attention heatmap unavailable: summary was empty."
169
  )
170
 
171
- download_path = prepare_download(
172
- text,
173
- summary_source,
174
- emotions,
175
- topic,
176
- neural_summary=summary or None,
177
- fallback_summary=fallback_summary,
178
- )
179
- download_update = gr.update(value=download_path, visible=True)
180
-
181
- return summary_html, emotion_plot, topic_markdown, attention_fig, download_update
182
 
183
  except Exception as exc: # pragma: no cover - surfaced in UI
184
  logger.error("Prediction error: %s", exc, exc_info=True)
185
- return "Prediction failed. Check logs for details.", None, "Error", None, hidden_download
186
 
187
 
188
  def format_summary(original: str, summary: str, *, notice: str = "") -> str:
@@ -561,14 +549,11 @@ def create_interface() -> gr.Blocks:
561
  )
562
  gr.Markdown(initial_visual_status)
563
 
564
- gr.Markdown("### Download Results")
565
- download_btn = gr.DownloadButton("Download JSON", visible=False)
566
-
567
  input_text.change(fn=count_tokens, inputs=[input_text], outputs=[token_box])
568
  analyze_btn.click(
569
  fn=predict,
570
  inputs=[input_text],
571
- outputs=[summary_output, emotion_output, topic_output, attention_output, download_btn],
572
  )
573
  refresh_metrics.click(
574
  fn=load_metrics_report_as_markdown,
@@ -583,6 +568,12 @@ app = demo
583
 
584
 
585
  if __name__ == "__main__":
 
 
 
 
 
 
586
  try:
587
  get_pipeline()
588
  demo.queue().launch(
 
124
 
125
 
126
  def predict(text: str):
 
127
  if not text or not text.strip():
128
  return (
129
  "Please enter text to analyze.",
130
  None,
131
  "No topic prediction available.",
132
  None,
 
133
  )
134
 
135
  try:
 
166
  "Attention heatmap unavailable: summary was empty."
167
  )
168
 
169
+ return summary_html, emotion_plot, topic_markdown, attention_fig
 
 
 
 
 
 
 
 
 
 
170
 
171
  except Exception as exc: # pragma: no cover - surfaced in UI
172
  logger.error("Prediction error: %s", exc, exc_info=True)
173
+ return "Prediction failed. Check logs for details.", None, "Error", None
174
 
175
 
176
  def format_summary(original: str, summary: str, *, notice: str = "") -> str:
 
549
  )
550
  gr.Markdown(initial_visual_status)
551
 
 
 
 
552
  input_text.change(fn=count_tokens, inputs=[input_text], outputs=[token_box])
553
  analyze_btn.click(
554
  fn=predict,
555
  inputs=[input_text],
556
+ outputs=[summary_output, emotion_output, topic_output, attention_output],
557
  )
558
  refresh_metrics.click(
559
  fn=load_metrics_report_as_markdown,
 
568
 
569
 
570
  if __name__ == "__main__":
571
+ import os
572
+
573
+ # On HuggingFace Spaces, share must be False (they handle routing)
574
+ # but we need to ensure server binds correctly
575
+ is_hf_space = os.environ.get("SPACE_ID") is not None
576
+
577
  try:
578
  get_pipeline()
579
  demo.queue().launch(