# (removed non-code residue from a web scrape: file-size banner, commit hashes,
#  and a line-number gutter — none of it is part of this module)
# backend_agent/api_generator.py
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Causal LM used to generate backend code from a natural-language task description.
MODEL_NAME = "facebook/opt-1.3b"
# NOTE(review): both objects are loaded eagerly at import time, so importing this
# module downloads/loads a ~1.3B-parameter model before any call is made —
# consider lazy initialization if import cost matters.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)

def generate_backend_code_llm(task_name):
    """
    Generate Python backend code (REST/GraphQL) for a given task using the LLM.

    Parameters
    ----------
    task_name : str
        Short description of the backend task to generate code for.

    Returns
    -------
    str
        Only the model's newly generated text (prompt echo removed),
        stripped of leading/trailing whitespace.
    """
    prompt = f"""
You are an expert backend developer. Generate Python code for REST or GraphQL APIs
for the following task:
Task: {task_name}

Include routes/resolvers, input validation, and placeholders for business logic.
"""
    inputs = tokenizer(prompt, return_tensors="pt")

    # Inference only: no_grad avoids building an autograd graph (memory/time).
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=350)

    # BUG FIX: decoding outputs[0] wholesale includes the prompt, and the old
    # split("Task:")[-1] cleanup kept the *tail of the prompt* ("Include
    # routes/resolvers...") rather than the generated code. Decoder-only models
    # echo the input ids at the front of `outputs`, so slice them off by length
    # and decode only the newly generated tokens.
    prompt_len = inputs["input_ids"].shape[1]
    generated_tokens = outputs[0][prompt_len:]
    code = tokenizer.decode(generated_tokens, skip_special_tokens=True)
    return code.strip()