File size: 679 Bytes
66e7d83
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
# llm_interface.py
import requests
from llm_utils import build_prompt

EXPLAIN_API_URL = "http://<your-colab-url>/explain"  # ← Replace this with your actual backend URL

def explain_with_llm(latex_str, timeout=60):
    """Send a LaTeX string to the explanation backend and return its reply.

    Args:
        latex_str: Raw LaTeX source to explain. Blank or whitespace-only
            input short-circuits with a warning message (no network call).
        timeout: Seconds to wait for the backend before giving up. Added
            because the original call had no timeout, so a hung backend
            would block the caller indefinitely.

    Returns:
        The backend's explanation text on success, or a human-readable
        error string. This function never raises — all failures are folded
        into the returned string.
    """
    if not latex_str.strip():
        return "⚠️ No LaTeX input provided."
    prompt = build_prompt(latex_str)
    try:
        # NOTE(review): the payload key is "latex" but carries the built
        # prompt, not the raw LaTeX — confirm the backend expects this.
        response = requests.post(
            EXPLAIN_API_URL, json={"latex": prompt}, timeout=timeout
        )
        if response.status_code == 200:
            return response.json().get("explanation", "No explanation returned.")
        else:
            return f"❌ Error: {response.status_code} - {response.text}"
    except Exception as e:
        # Broad catch is deliberate: this is a UI-facing boundary whose
        # contract is "always return a string" — it covers connection
        # errors, timeouts, and malformed JSON alike.
        return f"❌ Exception: {e}"