curiouscurrent committed on
Commit
b7b2532
·
verified ·
1 Parent(s): 85dc538

Update backend_agent/api_generator.py

Browse files
Files changed (1) hide show
  1. backend_agent/api_generator.py +33 -0
backend_agent/api_generator.py CHANGED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# backend_agent/api_generator.py
#
# Module-level setup: loads the tokenizer and causal-LM model once at import
# time so repeated calls to generate_backend_code_llm() reuse them.

from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Use a decoder-only LLM (OPT-125m for demo, can scale up)
# NOTE(review): from_pretrained() runs at import time and may download model
# weights on first use — confirm this side effect is acceptable here.
MODEL_NAME = "facebook/opt-125m"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
9
+
10
def generate_backend_code_llm(task_name, max_new_tokens=250):
    """
    Generate a Python Flask API skeleton for a given backend task using an LLM.

    Args:
        task_name: Short description of the backend task to scaffold
            (interpolated into the prompt).
        max_new_tokens: Upper bound on the number of generated tokens.
            Defaults to 250, matching the previous hard-coded limit.

    Returns:
        A string containing the generated code. If the model echoes the
        prompt back, only the text after the final "Python Flask code:"
        marker is returned.
    """
    prompt = f"""
You are an expert backend developer. Generate a Python Flask API skeleton for this backend task.
Include routes, basic input validation, and placeholders for business logic.

Task: {task_name}

Python Flask code:
"""
    inputs = tokenizer(prompt, return_tensors="pt")
    # Inference only: disable autograd bookkeeping to save memory and time.
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=max_new_tokens)
    code = tokenizer.decode(outputs[0], skip_special_tokens=True)

    # Decoder-only models prepend the prompt to their output; keep only the
    # text after the final marker so callers get just the generated code.
    if "Python Flask code:" in code:
        code = code.split("Python Flask code:")[-1].strip()

    return code