newtechdevng commited on
Commit
5dafbcb
·
verified ·
1 Parent(s): df9191a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -8
app.py CHANGED
@@ -3,15 +3,15 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
3
  import torch
4
 
5
  MODEL_ID = "newtechdevng/math-tutor-smollm2-360M"
6
- BASE_ID = "HuggingFaceTB/SmolLM2-360M-Instruct" # tokenizer source
7
 
8
- print("Loading tokenizer from base model...")
9
- tokenizer = AutoTokenizer.from_pretrained(BASE_ID) # ← fix: load from base
10
 
11
- print("Loading fine-tuned model...")
12
  model = AutoModelForCausalLM.from_pretrained(
13
  MODEL_ID,
14
- torch_dtype=torch.float32,
15
  device_map="cpu"
16
  )
17
  model.eval()
@@ -25,6 +25,7 @@ SYSTEM_PROMPT = (
25
  def solve(question, class_level, history):
26
  if not question.strip():
27
  return history, ""
 
28
  prompt = (
29
  "<|im_start|>system\n"
30
  + SYSTEM_PROMPT
@@ -45,10 +46,13 @@ def solve(question, class_level, history):
45
  reply = tokenizer.decode(outputs[0], skip_special_tokens=False)
46
  answer = reply.split("<|im_start|>assistant\n")[-1]
47
  answer = answer.replace("<|im_end|>", "").strip()
48
- history.append((question, answer))
 
 
 
49
  return history, ""
50
 
51
- with gr.Blocks(title="Math Tutor Class 6-10", theme=gr.themes.Soft()) as app:
52
  gr.Markdown("# 🎓 Math Tutor — Class 6 to 10")
53
  gr.Markdown("Ask any math question and get step-by-step solutions!")
54
  with gr.Row():
@@ -56,7 +60,8 @@ with gr.Blocks(title="Math Tutor Class 6-10", theme=gr.themes.Soft()) as app:
56
  choices=["6", "7", "8", "9", "10"],
57
  value="8", label="Select Class", scale=1
58
  )
59
- chatbot = gr.Chatbot(label="Math Solutions", height=450)
 
60
  with gr.Row():
61
  question = gr.Textbox(
62
  label="Your Question",
 
3
  import torch
4
 
5
  MODEL_ID = "newtechdevng/math-tutor-smollm2-360M"
6
+ BASE_ID = "HuggingFaceTB/SmolLM2-360M-Instruct"
7
 
8
+ print("Loading tokenizer...")
9
+ tokenizer = AutoTokenizer.from_pretrained(BASE_ID)
10
 
11
+ print("Loading model...")
12
  model = AutoModelForCausalLM.from_pretrained(
13
  MODEL_ID,
14
+ torch_dtype=torch.float16,
15
  device_map="cpu"
16
  )
17
  model.eval()
 
25
  def solve(question, class_level, history):
26
  if not question.strip():
27
  return history, ""
28
+
29
  prompt = (
30
  "<|im_start|>system\n"
31
  + SYSTEM_PROMPT
 
46
  reply = tokenizer.decode(outputs[0], skip_special_tokens=False)
47
  answer = reply.split("<|im_start|>assistant\n")[-1]
48
  answer = answer.replace("<|im_end|>", "").strip()
49
+
50
+ # ✅ Gradio 6 format — dict with role and content
51
+ history.append({"role": "user", "content": question})
52
+ history.append({"role": "assistant", "content": answer})
53
  return history, ""
54
 
55
+ with gr.Blocks(title="Math Tutor Class 6-10") as app:
56
  gr.Markdown("# 🎓 Math Tutor — Class 6 to 10")
57
  gr.Markdown("Ask any math question and get step-by-step solutions!")
58
  with gr.Row():
 
60
  choices=["6", "7", "8", "9", "10"],
61
  value="8", label="Select Class", scale=1
62
  )
63
+ # ✅ Gradio 6 chatbot — type="messages"
64
+ chatbot = gr.Chatbot(label="Math Solutions", height=450, type="messages")
65
  with gr.Row():
66
  question = gr.Textbox(
67
  label="Your Question",