# Scrape residue removed (file-size header "1,514 Bytes", commit 719475c,
# and a stray line-number gutter) — none of it is valid Python.
import gradio as gr, torch, json
from tokenizers import Tokenizer
from model.tiny_gpt2 import TinyGPT2, GPTConfig

# Paths to the artifacts produced by the tokenizer / pretrain / SFT stages.
TOK_PATH = "out/tokenizer.json"
CFG_PATH = "out/pretrain/gpt_config.json"
CKPT_PATH = "out/sft/model_sft.pt"

tok = Tokenizer.from_file(TOK_PATH)
# Use a context manager so the config file handle is closed promptly —
# the original `json.load(open(CFG_PATH))` leaked the handle.
with open(CFG_PATH, encoding="utf-8") as f:
    cfg = GPTConfig(**json.load(f))
model = TinyGPT2(cfg)
# NOTE(review): torch.load unpickles arbitrary objects unless
# weights_only=True is passed — fine for a locally produced checkpoint,
# but never point this at an untrusted file.
model.load_state_dict(torch.load(CKPT_PATH, map_location="cpu"))
model.eval()  # inference mode: disables dropout, freezes batch-norm stats

def generate_reply(prompt, max_new_tokens=96, top_k=40, top_p=0.9, temperature=0.8):
    """Generate a model answer for *prompt*.

    Prepends the "[BOS] " marker, samples up to ``max_new_tokens`` tokens
    with top-k / top-p / temperature sampling, then returns the decoded
    text with "[BOS]" stripped and truncated at the first "[EOS]".
    """
    token_ids = tok.encode("[BOS] " + prompt).ids
    input_batch = torch.tensor([token_ids], dtype=torch.long)
    # Sampling only — no gradients needed.
    with torch.no_grad():
        output = model.generate(
            input_batch,
            max_new_tokens=int(max_new_tokens),
            top_k=int(top_k),
            top_p=float(top_p),
            temperature=float(temperature),
        )
    decoded = tok.decode(output[0].tolist())
    # Keep only the text before the first end-of-sequence marker.
    reply, _sep, _tail = decoded.replace("[BOS]", "").partition("[EOS]")
    return reply.strip()

# Gradio UI: one question textbox plus sampling controls in, one answer
# textbox out. Input order must match generate_reply's positional params.
demo = gr.Interface(
    fn=generate_reply,
    inputs=[
        gr.Textbox(label="Ask an IB Physics HL question"),
        gr.Slider(16, 256, value=96, step=1, label="Max new tokens"),
        gr.Slider(0, 100, value=40, step=1, label="top_k"),
        gr.Slider(0.5, 1.0, value=0.9, step=0.01, label="top_p"),
        gr.Slider(0.2, 1.5, value=0.8, step=0.05, label="temperature"),
    ],
    outputs=gr.Textbox(label="MiniGPT Answer"),
    title="IB-Physics-Mini-GPT (from scratch)",
    description="Tiny educational LLM trained from scratch + light SFT on physics definitions & short Q&A."
)
# Launch the web server only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()