lakshraina2 committed on
Commit
7436ade
·
verified ·
1 Parent(s): 9aeb8f1

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -0
app.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Repository created in Step 1 that holds the fine-tuned weights.
model_id = "lakshraina2/leetcodeAI"

print("Downloading and loading model...")
tokenizer = AutoTokenizer.from_pretrained(model_id)
# Free-tier Spaces run on plain CPU instances, so keep the weights in
# full-precision float32 rather than half precision.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float32,
)
12
+
13
def solve_problem(problem_text):
    """Generate a code solution for the given LeetCode problem statement.

    Args:
        problem_text: Plain-text description of the problem to solve.

    Returns:
        The model's answer with the instruction prompt stripped. If the
        response marker is missing from the decoded output, the full
        decoded text is returned as a fallback.
    """
    prompt = f"### Instruction:\nSolve this LeetCode problem:\n{problem_text}\n\n### Response:\n"
    inputs = tokenizer(prompt, return_tensors="pt")

    # Inference only: disable autograd bookkeeping to save memory and
    # time on the CPU-only free tier.
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=512,
            temperature=0.2,
            do_sample=True,
        )

    solution = tokenizer.decode(outputs[0], skip_special_tokens=True)

    # The decoded text echoes the prompt, so keep only what follows the
    # response marker. Split at most once: if the marker ever appears
    # inside the generated answer, the tail is preserved instead of
    # being silently dropped.
    try:
        code_only = solution.split("### Response:\n", 1)[1].strip()
    except IndexError:
        # Marker not found (model drifted from the template) — return
        # the full decoded output rather than nothing.
        code_only = solution

    return code_only
32
+
33
# Expose solve_problem through a minimal web UI; Gradio also builds a
# REST API around the function automatically.
iface = gr.Interface(
    fn=solve_problem,
    inputs="text",
    outputs="text",
)
iface.launch()