kaurm43 committed on
Commit
d4e4a69
·
verified ·
1 Parent(s): ba0dd11

Update PolyAgent/gradio_interface.py

Browse files
Files changed (1) hide show
  1. PolyAgent/gradio_interface.py +61 -1
PolyAgent/gradio_interface.py CHANGED
@@ -27,6 +27,18 @@ except Exception as e:
27
  )
28
 
29
 
 
 
 
 
 
 
 
 
 
 
 
 
30
  # =============================================================================
31
  # DOI NORMALIZATION HELPERS
32
  # =============================================================================
@@ -46,6 +58,13 @@ def normalize_doi(raw: str) -> Optional[str]:
46
  def doi_to_url(doi: str) -> str:
47
  return f"https://doi.org/{doi}"
48
 
 
 
 
 
 
 
 
49
  # -----------------------------------------------------------------------------
50
  # Console defaults
51
  # -----------------------------------------------------------------------------
@@ -1301,6 +1320,15 @@ def build_ui() -> gr.Blocks:
1301
  with gr.Row():
1302
  with gr.Column(scale=1):
1303
  gr.Markdown("### Questions")
 
 
 
 
 
 
 
 
 
1304
  questions = gr.Textbox(
1305
  label="Ask your questions",
1306
  lines=16,
@@ -1325,6 +1353,19 @@ def build_ui() -> gr.Blocks:
1325
  outputs=[final_answer, ev_imgs],
1326
  )
1327
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1328
  with gr.Tab("Tools"):
1329
  gr.Markdown("Run individual tools for debugging/ad-hoc usage. Visuals are PNG-only.")
1330
 
@@ -1397,6 +1438,13 @@ def build_ui() -> gr.Blocks:
1397
  with gr.Tab("Other LLMs"):
1398
  gr.Markdown("Run a direct LLM-only response (no tools, no web search) using a non-GPT model name.")
1399
 
 
 
 
 
 
 
 
1400
  llm_model = gr.Dropdown(
1401
  label="Model",
1402
  choices=["mixtral-8x22b-instruct", "llama-3.1-8b-instruct"],
@@ -1407,6 +1455,18 @@ def build_ui() -> gr.Blocks:
1407
  llm_out = gr.Markdown("The model response will appear here.")
1408
  llm_btn.click(fn=llm_only_answer, inputs=[state, llm_model, llm_prompt], outputs=[llm_out])
1409
 
 
 
 
 
 
 
 
 
 
 
 
 
1410
  return demo
1411
 
1412
 
@@ -1421,4 +1481,4 @@ def main():
1421
 
1422
 
1423
  if __name__ == "__main__":
1424
- main()
 
27
  )
28
 
29
 
30
# -----------------------------------------------------------------------------
# Default cases
# -----------------------------------------------------------------------------

# Canned console prompt: property prediction (Tg) for a fixed seed polymer.
DEFAULT_CONSOLE_CASE_PREDICT_TG = "Predict the glass transition temperature (Tg) for the following PSMILES.\nseed_psmiles: [*]CC(=O)OCCOCCOC(=O)C[*]\n"

# Canned console prompt: inverse design targeting a Tg of 60 °C.
DEFAULT_CONSOLE_CASE_GENERATE_TG = "Generate four candidate polymers targeting Tg 60 (°C) while keeping melt-processability practical.\nseed_psmiles: [*]CC(=O)OCCOCCOC(=O)C[*]\n"
42
  # =============================================================================
43
  # DOI NORMALIZATION HELPERS
44
  # =============================================================================
 
58
def doi_to_url(doi: str) -> str:
    """Resolve a normalized DOI string to its canonical https://doi.org URL."""
    return "https://doi.org/" + doi
60
 
61
def _get_console_preset_text(preset_name: str) -> str:
    """Return the canned console prompt for *preset_name*.

    Unknown preset names fall back to the Tg-prediction prompt, matching
    the original if-chain's default branch.
    """
    presets = {
        "Predict Tg (given pSMILES)": DEFAULT_CONSOLE_CASE_PREDICT_TG,
        "Inverse design (target Tg)": DEFAULT_CONSOLE_CASE_GENERATE_TG,
    }
    return presets.get(preset_name, DEFAULT_CONSOLE_CASE_PREDICT_TG)
67
+
68
  # -----------------------------------------------------------------------------
69
  # Console defaults
70
  # -----------------------------------------------------------------------------
 
1320
  with gr.Row():
1321
  with gr.Column(scale=1):
1322
  gr.Markdown("### Questions")
1323
+
1324
+ # --- PRESET BUTTONS ---
1325
+ with gr.Row():
1326
+ btn_preset_predict = gr.Button("Load preset: Predict Tg", size="sm")
1327
+ btn_preset_generate = gr.Button(
1328
+ "Load preset: Inverse design (Tg target)", size="sm"
1329
+ )
1330
+ # ------------------------------
1331
+
1332
  questions = gr.Textbox(
1333
  label="Ask your questions",
1334
  lines=16,
 
1353
  outputs=[final_answer, ev_imgs],
1354
  )
1355
 
1356
+ # --- PRESET HANDLERS ---
1357
+ btn_preset_predict.click(
1358
+ fn=lambda: DEFAULT_CONSOLE_CASE_PREDICT_TG,
1359
+ inputs=[],
1360
+ outputs=[questions],
1361
+ )
1362
+ btn_preset_generate.click(
1363
+ fn=lambda: DEFAULT_CONSOLE_CASE_GENERATE_TG,
1364
+ inputs=[],
1365
+ outputs=[questions],
1366
+ )
1367
+ # -------------------------------
1368
+
1369
  with gr.Tab("Tools"):
1370
  gr.Markdown("Run individual tools for debugging/ad-hoc usage. Visuals are PNG-only.")
1371
 
 
1438
  with gr.Tab("Other LLMs"):
1439
  gr.Markdown("Run a direct LLM-only response (no tools, no web search) using a non-GPT model name.")
1440
 
1441
+ with gr.Row():
1442
+ btn_llm_preset_predict = gr.Button("Load preset: Predict Tg", size="sm")
1443
+ btn_llm_preset_generate = gr.Button(
1444
+ "Load preset: Inverse design (Tg target)", size="sm"
1445
+ )
1446
+ # ------------------------------
1447
+
1448
  llm_model = gr.Dropdown(
1449
  label="Model",
1450
  choices=["mixtral-8x22b-instruct", "llama-3.1-8b-instruct"],
 
1455
  llm_out = gr.Markdown("The model response will appear here.")
1456
  llm_btn.click(fn=llm_only_answer, inputs=[state, llm_model, llm_prompt], outputs=[llm_out])
1457
 
1458
+ btn_llm_preset_predict.click(
1459
+ fn=lambda: DEFAULT_CONSOLE_CASE_PREDICT_TG,
1460
+ inputs=[],
1461
+ outputs=[llm_prompt],
1462
+ )
1463
+ btn_llm_preset_generate.click(
1464
+ fn=lambda: DEFAULT_CONSOLE_CASE_GENERATE_TG,
1465
+ inputs=[],
1466
+ outputs=[llm_prompt],
1467
+ )
1468
+ # -------------------------------
1469
+
1470
  return demo
1471
 
1472
 
 
1481
 
1482
 
1483
# Script entry point: launch the app only when this file is run directly,
# not when it is imported as a module.
if __name__ == "__main__":
    main()