Ida committed on
Commit
4bf76eb
·
1 Parent(s): 0705bfe
Files changed (2) hide show
  1. app.py +58 -0
  2. requirements.txt +6 -0
app.py ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import sympy as sp
3
+ import torch
4
+ from transformers import AutoTokenizer, AutoModelForCausalLM
5
+
6
# Model configuration: a small instruct model that fits on a CPU-only Space.
MODEL_ID = "Qwen/Qwen2.5-0.5B-Instruct"
SYSTEM_PROMPT = "You are a helpful tutor. Match the user's level."

# Load tokenizer and model once at import time; the model is used for
# inference only, so switch it to eval mode immediately.
tok = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.float32,  # CPU
    device_map=None,
).eval()
16
+
17
def verify_math(expr_str: str) -> str:
    """Parse *expr_str* with SymPy, simplify it, and return a LaTeX summary.

    On any parse or simplification failure, return a human-readable note
    instead of raising, so callers can always embed the result in output.
    """
    try:
        reduced = sp.simplify(sp.sympify(expr_str))
    except Exception as err:  # sympify/simplify raise many exception types
        return f"Could not verify with SymPy: {err}"
    return f"Simplified: ${sp.latex(reduced)}$"
24
+
25
def generate(question: str, level: str, step_by_step: bool) -> str:
    """Answer *question* with the local LLM, tuned to the learner's level.

    Args:
        question: Free-form user question.
        level: Learner level label (e.g. "Beginner"), forwarded as prompt text.
        step_by_step: If True, ask the model for a step-by-step explanation.

    Returns:
        A Markdown string: the model's answer, a SymPy verification note,
        and a status footer.
    """
    if not question.strip():
        return "Please enter a question."
    style = f"Level: {level}. {'Explain step-by-step.' if step_by_step else 'Be concise.'}"
    # Use the tokenizer's chat template — Qwen instruct models are trained on
    # it — instead of a hand-rolled "System:/User:/Assistant:" prompt, which
    # degrades generation quality.
    messages = [
        {"role": "system", "content": f"{SYSTEM_PROMPT}\n{style}"},
        {"role": "user", "content": question},
    ]
    prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    inputs = tok(prompt, return_tensors="pt")
    with torch.no_grad():
        out = model.generate(
            **inputs,
            max_new_tokens=384,
            do_sample=True,
            temperature=0.7,
            top_p=0.95,
            pad_token_id=tok.eos_token_id,
        )
    # Decode only the newly generated tokens. This replaces the brittle
    # split("Assistant:") post-processing, which broke whenever the user's
    # question itself contained that string.
    new_tokens = out[0][inputs["input_ids"].shape[1]:]
    text = tok.decode(new_tokens, skip_special_tokens=True).strip()
    # Heuristic math detection. NOTE(review): a plain hyphen in prose also
    # triggers this; acceptable because verify_math fails gracefully.
    is_math = any(ch in question for ch in "+-*/=^") or question.lower().startswith(("simplify", "derive", "integrate"))
    sympy_note = verify_math(question) if is_math else "No math verification needed."
    return f"{text}\n\n---\n**SymPy check:** {sympy_note}\n_Status: Transformers CPU_"
46
+
47
def build_app():
    """Assemble and return the Gradio Blocks UI (caller launches it)."""
    with gr.Blocks(title="LearnLoop — CPU Space") as app:
        gr.Markdown("# LearnLoop — CPU-only demo")
        question_box = gr.Textbox(
            label="Your question",
            placeholder="e.g., simplify (x^2 - 1)/(x - 1)",
        )
        level_choice = gr.Dropdown(
            choices=["Beginner", "Intermediate", "Advanced"],
            value="Beginner",
            label="Level",
        )
        stepwise = gr.Checkbox(value=True, label="Step-by-step")
        explain_btn = gr.Button("Explain")
        answer = gr.Markdown()
        explain_btn.click(generate, [question_box, level_choice, stepwise], answer)
    return app
56
+
57
# Entry point: build the UI and start the Gradio server when run as a script
# (imports of this module, e.g. by a test runner, do not launch the server).
if __name__ == "__main__":
    build_app().launch()
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ gradio>=4.44.0
2
+ sympy
3
+ transformers>=4.44.0
4
+ torch>=2.2.0
5
+ sentencepiece
6
+ safetensors