ProjectConclusion / llm_interface.py
MasteredUltraInstinct's picture
Create llm_interface.py
66e7d83 verified
raw
history blame contribute delete
679 Bytes
# llm_interface.py
# Thin HTTP client for a remote LaTeX-explanation backend (see explain_with_llm).
import requests
from llm_utils import build_prompt
# Endpoint of the Colab-hosted backend; the placeholder must be replaced before use.
EXPLAIN_API_URL = "http://<your-colab-url>/explain" # ← Replace this with your actual backend URL
def explain_with_llm(latex_str):
    """Send a LaTeX string to the remote explanation backend and return its answer.

    Parameters
    ----------
    latex_str : str
        Raw LaTeX from the user. Blank/whitespace-only input is rejected
        before any network call is made.

    Returns
    -------
    str
        The backend's "explanation" field on success, or a human-readable
        error message (emoji-prefixed, matching the module's style) on any
        failure. This function never raises; errors are reported as strings.
    """
    if not latex_str.strip():
        return "⚠️ No LaTeX input provided."
    prompt = build_prompt(latex_str)
    try:
        # timeout= prevents the caller (e.g. a UI thread) from hanging
        # forever when the Colab tunnel is down or unreachable.
        response = requests.post(EXPLAIN_API_URL, json={"latex": prompt}, timeout=30)
        if response.status_code == 200:
            try:
                return response.json().get("explanation", "No explanation returned.")
            except ValueError:
                # 200 status but non-JSON body (e.g. an HTML error page from the tunnel).
                return f"❌ Error: invalid JSON response - {response.text}"
        else:
            return f"❌ Error: {response.status_code} - {response.text}"
    except requests.RequestException as e:
        # Narrowed from bare Exception: only network/timeout/connection
        # failures are expected here; anything else is a real bug and should surface.
        return f"❌ Exception: {e}"