OpeneR Sisyphus committed on
Commit
c27b609
·
1 Parent(s): 778278c

Remove Gradio runtime and use FastAPI-only backend

Browse files

Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-opencode)

Co-authored-by: Sisyphus <clio-agent@sisyphuslabs.ai>

Files changed (3) hide show
  1. app.py +4 -89
  2. custom_web.py +5 -3
  3. requirements.txt +2 -1
app.py CHANGED
@@ -19,8 +19,6 @@ from urllib.error import HTTPError, URLError
19
  from urllib.parse import quote, urlparse
20
  from urllib.request import Request, urlopen
21
 
22
- import gradio as gr
23
-
24
  from hydradeck.clients import ChatMessage, GrokClient
25
  from hydradeck.config import resolve_api_key, resolve_base_url, resolve_model
26
  from hydradeck.core.types import RunConfig
@@ -308,7 +306,7 @@ def _run_agentic_pipeline(
308
  api_key: str,
309
  request_budget: float,
310
  use_mock: bool,
311
- progress: gr.Progress = gr.Progress(),
312
  stage_callback=None,
313
  language: str = "en",
314
  stage_models: dict[str, str] | None = None,
@@ -328,7 +326,8 @@ def _run_agentic_pipeline(
328
 
329
  def mark(step: int, label: str, detail: str) -> None:
330
  pct = min(max(step / total_steps, 0.0), 1.0)
331
- _ = progress(pct, desc=label)
 
332
  stage_logs.append(f"{step}/{total_steps} {label}: {detail}")
333
 
334
  def emit_stage(
@@ -994,7 +993,7 @@ def _run_agentic_pipeline_stream(
994
  api_key,
995
  request_budget,
996
  use_mock,
997
- gr.Progress(),
998
  on_stage,
999
  )
1000
  wait_tick = 0
@@ -1183,87 +1182,3 @@ def _run_pipeline(
1183
  copy_zip.write_bytes(out_zip.read_bytes())
1184
  status = f"Done. Output zip: {copy_zip}"
1185
  return status, report_md, paper_tex, slides_tex
1186
-
1187
-
1188
- with gr.Blocks(title="hydradeck WebUI") as demo:
1189
- gr.Markdown("# hydradeck WebUI\nRun deep-research and export paper/slides tex.")
1190
- with gr.Row():
1191
- topic = gr.Textbox(label="Topic", value="RynnBrain technical report")
1192
- model = gr.Textbox(label="Model", value="grok-4")
1193
- with gr.Row():
1194
- base_url = gr.Textbox(label="Base URL", value="https://api.example.com")
1195
- api_key = gr.Textbox(label="API Key", type="password", value="")
1196
- with gr.Row():
1197
- max_sources = gr.Number(label="Max sources", value=6, precision=0)
1198
- iterations = gr.Number(label="Iterations", value=1, precision=0)
1199
- llm_timeout = gr.Number(label="LLM timeout (s)", value=90)
1200
- request_budget = gr.Number(label="Request budget (s)", value=35)
1201
- seed_urls = gr.Textbox(
1202
- label="Seed URLs (one per line)",
1203
- value="https://github.com/alibaba-damo-academy/RynnBrain\nhttps://arxiv.org",
1204
- lines=4,
1205
- )
1206
- use_mock = gr.Checkbox(label="Use mock (offline)", value=False)
1207
-
1208
- check_btn = gr.Button("Quick API Check")
1209
- run_btn = gr.Button("Run Full Pipeline")
1210
- run_agentic_btn = gr.Button("Run Agentic Pipeline")
1211
- status = gr.Textbox(label="Status")
1212
- progress_pct = gr.Slider(label="Progress (%)", minimum=0, maximum=100, step=1, value=0, interactive=False)
1213
- progress_log = gr.Textbox(label="Agent Progress", lines=10)
1214
- scope_json = gr.Textbox(label="Scope (Agent-1)", lines=10)
1215
- section_plan_json = gr.Textbox(label="Section Plan (Agent-2)", lines=10)
1216
- report_md = gr.Textbox(label="report.md", lines=14)
1217
- paper_tex = gr.Textbox(label="paper.tex", lines=14)
1218
- slides_tex = gr.Textbox(label="slides.tex", lines=14)
1219
- rendered_pdfs = gr.Textbox(label="Rendered PDF Paths", lines=2)
1220
- paper_pdf_file = gr.Textbox(label="paper.pdf path", lines=1)
1221
- slides_pdf_file = gr.Textbox(label="slides.pdf path", lines=1)
1222
-
1223
- check_btn.click(
1224
- _api_quick_check,
1225
- [base_url, api_key, model, request_budget],
1226
- [status],
1227
- queue=False,
1228
- )
1229
-
1230
- run_btn.click(
1231
- _run_pipeline,
1232
- [
1233
- topic,
1234
- model,
1235
- base_url,
1236
- api_key,
1237
- max_sources,
1238
- iterations,
1239
- llm_timeout,
1240
- request_budget,
1241
- seed_urls,
1242
- use_mock,
1243
- ],
1244
- [status, report_md, paper_tex, slides_tex],
1245
- queue=False,
1246
- )
1247
-
1248
- run_agentic_btn.click(
1249
- _run_agentic_pipeline_stream,
1250
- [topic, model, base_url, api_key, request_budget, use_mock],
1251
- [
1252
- status,
1253
- progress_log,
1254
- scope_json,
1255
- section_plan_json,
1256
- paper_tex,
1257
- slides_tex,
1258
- rendered_pdfs,
1259
- paper_pdf_file,
1260
- slides_pdf_file,
1261
- progress_pct,
1262
- ],
1263
- queue=True,
1264
- )
1265
-
1266
-
1267
- if __name__ == "__main__":
1268
- demo.queue(default_concurrency_limit=2)
1269
- demo.launch(server_name="0.0.0.0", server_port=7860)
 
19
  from urllib.parse import quote, urlparse
20
  from urllib.request import Request, urlopen
21
 
 
 
22
  from hydradeck.clients import ChatMessage, GrokClient
23
  from hydradeck.config import resolve_api_key, resolve_base_url, resolve_model
24
  from hydradeck.core.types import RunConfig
 
306
  api_key: str,
307
  request_budget: float,
308
  use_mock: bool,
309
+ progress=None,
310
  stage_callback=None,
311
  language: str = "en",
312
  stage_models: dict[str, str] | None = None,
 
326
 
327
  def mark(step: int, label: str, detail: str) -> None:
328
  pct = min(max(step / total_steps, 0.0), 1.0)
329
+ if callable(progress):
330
+ _ = progress(pct, desc=label)
331
  stage_logs.append(f"{step}/{total_steps} {label}: {detail}")
332
 
333
  def emit_stage(
 
993
  api_key,
994
  request_budget,
995
  use_mock,
996
+ None,
997
  on_stage,
998
  )
999
  wait_tick = 0
 
1182
  copy_zip.write_bytes(out_zip.read_bytes())
1183
  status = f"Done. Output zip: {copy_zip}"
1184
  return status, report_md, paper_tex, slides_tex
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
custom_web.py CHANGED
@@ -7,9 +7,10 @@ import uuid
7
  from pathlib import Path
8
  from typing import Any
9
 
 
 
10
  from fastapi import FastAPI, HTTPException
11
  from fastapi.responses import FileResponse, HTMLResponse
12
- import gradio as gr
13
  from pydantic import BaseModel
14
 
15
  from app import _api_quick_check, _run_agentic_pipeline
@@ -167,7 +168,7 @@ def _run_job(job_id: str, req: RunRequest) -> None:
167
  api_key=req.api_key,
168
  request_budget=req.request_budget,
169
  use_mock=req.use_mock,
170
- progress=gr.Progress(),
171
  stage_callback=on_stage,
172
  language=req.language,
173
  stage_models={
@@ -544,4 +545,5 @@ if __name__ == "__main__":
544
  import uvicorn
545
 
546
  _load_state()
547
- uvicorn.run(app, host="0.0.0.0", port=7861)
 
 
7
  from pathlib import Path
8
  from typing import Any
9
 
10
+ import os
11
+
12
  from fastapi import FastAPI, HTTPException
13
  from fastapi.responses import FileResponse, HTMLResponse
 
14
  from pydantic import BaseModel
15
 
16
  from app import _api_quick_check, _run_agentic_pipeline
 
168
  api_key=req.api_key,
169
  request_budget=req.request_budget,
170
  use_mock=req.use_mock,
171
+ progress=None,
172
  stage_callback=on_stage,
173
  language=req.language,
174
  stage_models={
 
545
  import uvicorn
546
 
547
  _load_state()
548
+ port = int(os.getenv("PORT", "7861"))
549
+ uvicorn.run(app, host="0.0.0.0", port=port)
requirements.txt CHANGED
@@ -1,4 +1,5 @@
1
  requests>=2.31.0
2
  urllib3>=2,<3
3
- gradio>=4.44.1,<5
4
  huggingface_hub<1.0
 
 
 
1
  requests>=2.31.0
2
  urllib3>=2,<3
 
3
  huggingface_hub<1.0
4
+ fastapi>=0.111,<1
5
+ uvicorn[standard]>=0.30,<1