kaurm43 commited on
Commit
2ec3010
·
verified ·
1 Parent(s): 57dd353

Update PolyAgent/gradio_interface.py

Browse files
Files changed (1):
  1. PolyAgent/gradio_interface.py (+18 −14)
PolyAgent/gradio_interface.py CHANGED
@@ -1201,7 +1201,7 @@ def gpt_only_answer(state: Dict[str, Any], prompt: str) -> str:
1201
  # ----------------------------- Other LLMs (Hugging Face Inference) ----------------------------- #
1202
  def llm_only_answer(state: Dict[str, Any], model_name: str, prompt: str) -> str:
1203
  """
1204
- LLM-only responses using Hugging Face Inference API for non-GPT models.
1205
  """
1206
  ensure_orch(state)
1207
 
@@ -1242,22 +1242,26 @@ def llm_only_answer(state: Dict[str, Any], model_name: str, prompt: str) -> str:
1242
 
1243
  client = InferenceClient(model=model_id, token=HF_TOKEN)
1244
 
 
 
 
 
 
 
 
 
 
 
 
1245
  try:
1246
- resp = client.chat_completion(
1247
- messages=[
1248
- {
1249
- "role": "system",
1250
- "content": (
1251
- "You are a polymer R&D assistant. Answer directly and clearly. "
1252
- "Do not call tools or run web searches. If you are uncertain, state uncertainty."
1253
- ),
1254
- },
1255
- {"role": "user", "content": p},
1256
- ],
1257
- max_tokens=900,
1258
  temperature=0.7,
 
 
1259
  )
1260
- return resp.choices[0].message.content or ""
1261
  except Exception as e:
1262
  return pretty_json({"ok": False, "error": str(e), "model_id": model_id})
1263
 
 
1201
  # ----------------------------- Other LLMs (Hugging Face Inference) ----------------------------- #
1202
  def llm_only_answer(state: Dict[str, Any], model_name: str, prompt: str) -> str:
1203
  """
1204
+ LLM-only responses using Hugging Face Inference API for non-chat models.
1205
  """
1206
  ensure_orch(state)
1207
 
 
1242
 
1243
  client = InferenceClient(model=model_id, token=HF_TOKEN)
1244
 
1245
+ # 🔑 Manually format instruction prompt
1246
+ system_prompt = (
1247
+ "You are a polymer R&D assistant. "
1248
+ "Answer directly and clearly. "
1249
+ "Do not call tools or run web searches. "
1250
+ "If you are uncertain, state uncertainty."
1251
+ )
1252
+
1253
+ full_prompt = f"""[INST] {system_prompt}
1254
+ {p} [/INST]"""
1255
+
1256
  try:
1257
+ resp = client.text_generation(
1258
+ full_prompt,
1259
+ max_new_tokens=900,
 
 
 
 
 
 
 
 
 
1260
  temperature=0.7,
1261
+ do_sample=True,
1262
+ return_full_text=False,
1263
  )
1264
+ return resp.strip()
1265
  except Exception as e:
1266
  return pretty_json({"ok": False, "error": str(e), "model_id": model_id})
1267