# llm_utils.py — helpers for loading theorem context from YAML, building
# tutoring prompts, and requesting explanations from an LLM service.
import requests
import yaml
# === Load theorems.yaml and render it into clean context text for LLM ===
def load_theorem_context(yaml_path="theorems.yaml"):
    """Load theorems from a YAML file and render them as markdown-style
    context text for inclusion in an LLM prompt.

    Args:
        yaml_path: Path to the YAML file. Expected to parse to a mapping
            with a 'theorems' list of dicts holding the keys
            name / statement / tags / when_to_use / short_explanation.

    Returns:
        A newline-joined string with one formatted entry per theorem,
        each followed by a '---' separator line; a warning string when
        the YAML does not parse to a mapping; an empty string when the
        'theorems' list is missing or empty.

    Raises:
        OSError / yaml.YAMLError: propagated from file access / parsing.
    """
    # Decode explicitly as UTF-8 so emoji and non-ASCII theorem text are
    # not garbled by a platform-default encoding.
    with open(yaml_path, 'r', encoding='utf-8') as f:
        data = yaml.safe_load(f)
    if not isinstance(data, dict):
        # Was mojibake ("โš ๏ธ"); restored to the intended warning emoji.
        return "⚠️ Invalid theorems format in YAML."
    context_lines = []
    for idx, th in enumerate(data.get('theorems', []), 1):
        # Every field is optional in the YAML; fall back to placeholders.
        context_lines.append(
            f"**Theorem {idx}: {th.get('name', 'Unnamed')}**\n"
            f"- **Statement**: {th.get('statement', 'N/A')}\n"
            f"- **Tags**: {', '.join(th.get('tags', []))}\n"
            f"- **When to Use**: {th.get('when_to_use', 'N/A')}\n"
            f"- **Short Explanation**: {th.get('short_explanation', 'N/A')}\n"
        )
        context_lines.append('---')  # visual separator between theorems
    return "\n".join(context_lines)
# === Build prompt using user steps and theorem context ===
def build_prompt(equation_type, solution_text, theorem_context):
    """Assemble the tutoring prompt sent to the LLM.

    Args:
        equation_type: Short label for the equation kind (e.g. "quadratic"),
            interpolated into the user-steps heading.
        solution_text: The user's solution steps, inserted verbatim.
        theorem_context: Pre-rendered theorem reference text
            (see load_theorem_context).

    Returns:
        A single markdown-formatted prompt string combining the theorem
        database, the user's steps, and the task instructions.
    """
    # Section heading emoji were mojibake ("๐Ÿ“˜", "๐Ÿงฎ", "๐ŸŽฏ");
    # restored to the intended 📘 / 🧮 / 🎯 characters.
    return (
        f"You are a helpful math tutor. Below is a theorem reference database.\n\n"
        f"Each theorem includes:\n"
        f"- Name\n- Statement\n- Tags\n- When to use\n- Short Explanation\n\n"
        f"---\n\n"
        f"### 📘 Theorem Database:\n\n"
        f"{theorem_context}\n\n"
        f"---\n\n"
        f"### 🧮 User Steps for solving a {equation_type} equation:\n\n"
        f"{solution_text}\n\n"
        f"---\n\n"
        f"### 🎯 Task:\n"
        f"Explain each solution step clearly.\n"
        f"Use relevant theorems by number or name.\n"
        f"Make it understandable to a smart high school student.\n"
        f"Focus on reasoning, not just restating the steps or theorems."
    )
# === Request LLM explanation ===
def explain_with_llm(solution_text, equation_type, llm_url, yaml_path="theorems.yaml"):
    """Ask the LLM service at ``llm_url`` to explain the user's solution steps.

    Args:
        solution_text: The user's solution steps.
        equation_type: Equation kind label used in the prompt heading.
        llm_url: Base URL of the LLM service; POSTs to ``{llm_url}/explain``.
        yaml_path: Path to the theorem YAML passed to load_theorem_context.

    Returns:
        The explanation string on success; otherwise a human-readable
        "❌ ..." error string. Never raises.
    """
    # Error-message emoji were mojibake ("โŒ"); restored to ❌ throughout.
    try:
        # Cheap sanity check before doing any file or network work.
        if not llm_url or not llm_url.strip().startswith("http"):
            return "❌ Invalid or missing LLM URL."
        theorem_context = load_theorem_context(yaml_path)
        prompt = build_prompt(equation_type, solution_text, theorem_context)
        response = requests.post(
            f"{llm_url.strip()}/explain",
            json={"prompt": prompt},
            timeout=90  # generous: LLM generation can be slow
        )
        if response.status_code == 200:
            result = response.json()
            # Accept either {"explanation": ...} or a bare list response.
            if isinstance(result, dict):
                return result.get("explanation", "❌ No explanation returned.")
            elif isinstance(result, list):
                return result[0] if result else "❌ Empty response list."
            else:
                return f"❌ Unexpected LLM response format: {type(result)}"
        return f"❌ LLM request failed: {response.status_code}"
    except Exception as e:
        # Boundary function: report any failure as a string, never raise.
        return f"❌ LLM Error: {e}"
# === Request fallback if parsing failed ===
def request_llm_fallback(bad_input, llm_url):
    """Best-effort: ask the LLM service to clean up LaTeX that failed to parse.

    POSTs ``bad_input`` to ``{llm_url}/clean`` and returns the service's
    "cleaned_latex" value. On any failure (bad URL, network error, bad
    JSON, unexpected shape) the original input is returned unchanged, so
    callers can always use the result.

    Args:
        bad_input: The LaTeX/text that failed to parse.
        llm_url: Base URL of the LLM service.

    Returns:
        The cleaned string, or ``bad_input`` when cleaning is unavailable.
    """
    # Guard falsy URLs up front, consistent with explain_with_llm.
    if not llm_url:
        return bad_input
    try:
        response = requests.post(
            f"{llm_url.strip()}/clean",
            json={"prompt": bad_input},
            timeout=20
        )
        result = response.json()
        if isinstance(result, dict):
            return result.get("cleaned_latex", bad_input)
        return bad_input
    except Exception:
        # Was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception, same fallback behavior.
        return bad_input