george2cool36 committed on
Commit
f97f599
·
verified ·
1 Parent(s): f005f2b

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +19 -7
app.py CHANGED
@@ -206,24 +206,36 @@ def _format_chat(system_prompt: str, user_prompt: str) -> str:
206
  return _tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
207
 
208
  def llm_explain(structured_message: str) -> str:
209
- # Graceful fallback
210
  if (not _USE_LLM) or (_tokenizer is None) or (_pipe is None):
211
  try:
212
  d = json.loads(structured_message)
213
- so = d["verdicts"]["strength_ok"]
214
- sv = d["verdicts"]["service_ok"]
215
- s_msg = "OK" if so else "NOT OK"
216
- v_msg = "OK" if sv else "NOT OK"
217
- return f"Like a sturdy spatula holding a pancake: strength is {s_msg} and deflection (L/180) is {v_msg}."
 
 
 
 
 
 
 
 
 
 
218
  except Exception:
219
  return "Quick take: strength and deflection (L/180) checks computed; see the table and math."
 
 
220
  system_prompt = (
221
  "You explain engineering to a smart 5-year-old using a quick food analogy. "
222
  "You ALWAYS respond in exactly ONE friendly sentence."
223
  )
224
  user_prompt = (
225
  "Here are the inputs, formulas, and results for a cantilever beam calculation.\n"
226
- "Summarize whether the beam is OK in strength and deflection.\n\n"
227
  + structured_message
228
  )
229
  formatted = _format_chat(system_prompt, user_prompt)
 
206
  return _tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
207
 
208
  def llm_explain(structured_message: str) -> str:
209
+ # Deterministic, number-rich fallback
210
  if (not _USE_LLM) or (_tokenizer is None) or (_pipe is None):
211
  try:
212
  d = json.loads(structured_message)
213
+ r = d["results"]
214
+ v = d["verdicts"]
215
+ inp = d["inputs_SI"]
216
+ L = float(inp["L_m"])
217
+ Sy = float(inp["Sy_MPa"])
218
+ sigma = float(r["sigma_max_MPa"])
219
+ delta = float(r["delta_mm"])
220
+ delta_allow = L/180.0*1e3 # mm
221
+
222
+ s_msg = "OK" if v["strength_ok"] else "NOT OK"
223
+ d_msg = "OK" if v["service_ok"] else "NOT OK"
224
+ return (
225
+ f"Strength {s_msg} (σ_max={sigma:.2f} MPa vs Sy={Sy:.0f} MPa); "
226
+ f"deflection {d_msg} (δ={delta:.2f} mm vs L/180={delta_allow:.2f} mm)."
227
+ )
228
  except Exception:
229
  return "Quick take: strength and deflection (L/180) checks computed; see the table and math."
230
+
231
+ # LLM path (only if model actually loads)
232
  system_prompt = (
233
  "You explain engineering to a smart 5-year-old using a quick food analogy. "
234
  "You ALWAYS respond in exactly ONE friendly sentence."
235
  )
236
  user_prompt = (
237
  "Here are the inputs, formulas, and results for a cantilever beam calculation.\n"
238
+ "Summarize whether the beam is OK in strength and deflection, with one short food analogy.\n\n"
239
  + structured_message
240
  )
241
  formatted = _format_chat(system_prompt, user_prompt)