Scaryscar committed on
Commit
8ae2e20
·
verified ·
1 Parent(s): f7970dc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -4
app.py CHANGED
@@ -1,8 +1,46 @@
1
  import os
 
 
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
 
4
- model_id = "your-username/my-wizardmath-finetuned"
5
- hf_token = os.getenv("HF_TOKEN")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
 
7
- tokenizer = AutoTokenizer.from_pretrained(model_id, token=hf_token)
8
- model = AutoModelForCausalLM.from_pretrained(model_id, token=hf_token)
 
import os

import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Replace this with your actual model repo ID on Hugging Face.
MODEL_ID = "your-username/wizardmath-finetuned"

# If your model is private, provide your Hugging Face token via environment
# variable; None means anonymous access (public repos only).
HF_TOKEN = os.getenv("HF_TOKEN", None)

# Load tokenizer and model once at startup.
# FIX: `use_auth_token=` is deprecated and removed in transformers v5;
# the supported keyword is `token=`.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, token=HF_TOKEN)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    token=HF_TOKEN,
    # Half precision only when a GPU is present; fp16 on CPU is slow/unsupported.
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
    device_map="auto",  # let accelerate place the weights on available devices
)
def generate_answer(prompt, max_new_tokens=256, temperature=0.7, top_p=0.9):
    """Generate a completion for *prompt* with the loaded causal LM.

    Sampling is controlled by ``temperature`` and ``top_p``; the decoded
    text of the full sequence (prompt included) is returned as a string.
    """
    encoded = tokenizer(prompt, return_tensors="pt").to(model.device)
    # Collect generation settings up front so the generate() call stays short.
    sampling_config = dict(
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        top_p=top_p,
        do_sample=True,
        # Some causal LMs define no pad token; fall back to EOS for padding.
        pad_token_id=tokenizer.eos_token_id,
    )
    with torch.no_grad():  # inference only — no gradients needed
        generated = model.generate(**encoded, **sampling_config)
    return tokenizer.decode(generated[0], skip_special_tokens=True)
30
+
31
+ iface = gr.Interface(
32
+ fn=generate_answer,
33
+ inputs=[
34
+ gr.Textbox(lines=4, label="Prompt"),
35
+ gr.Slider(50, 1024, value=256, step=1, label="Max new tokens"),
36
+ gr.Slider(0.1, 1.0, value=0.7, step=0.05, label="Temperature"),
37
+ gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p"),
38
+ ],
39
+ outputs=gr.Textbox(label="Response"),
40
+ title="WizardMath Fine-Tuned Model",
41
+ description="Ask math questions to your fine-tuned WizardMath model!"
42
+ )
43
+
44
+ if __name__ == "__main__":
45
+ iface.launch()
46