# ZewAI_3.9_Code / app.py
# Uploaded by Zakomako4567 — commit 721ae99 ("Create app.py"), verified.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
class ZewAI3:
    """Lightweight code-generation wrapper around a small causal LM.

    Defaults to microsoft/phi-2 — chosen because it is tiny but strong at
    coding tasks.
    """

    def __init__(self, model_name="microsoft/phi-2"):
        """Load tokenizer, model (CPU / float32), and a text-generation pipeline.

        Args:
            model_name: Hugging Face model id to load.
        """
        print(f"Initializing ZewAI 3 based on {model_name}...")
        self.tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
        self.model = AutoModelForCausalLM.from_pretrained(
            model_name,
            torch_dtype=torch.float32,
            trust_remote_code=True,
        )
        self.pipe = pipeline("text-generation", model=self.model, tokenizer=self.tokenizer)

    def generate_code(self, prompt, max_length=512):
        """Generate code for *prompt* and return only the model's completion.

        Args:
            prompt: Natural-language description of the code to write.
            max_length: Maximum number of NEW tokens to generate. NOTE: despite
                the name, this is forwarded as ``max_new_tokens``; the parameter
                name is kept for backward compatibility with existing callers.

        Returns:
            str: The generated completion text only — the instruction scaffold
            is no longer echoed back to the caller.
        """
        # Phi-2's "Instruct:/Output:" format keeps the model focused on code.
        formatted_prompt = f"Instruct: Write the following code: {prompt}\nOutput:"
        results = self.pipe(
            formatted_prompt,
            max_new_tokens=max_length,
            do_sample=True,
            temperature=0.7,
            # phi-2 defines no pad token; pad with EOS to avoid the
            # open-ended-generation warning during sampling.
            pad_token_id=self.tokenizer.eos_token_id,
            # BUG FIX: previously the full "Instruct: ... Output:" prompt was
            # included in the returned text; return only the new tokens.
            return_full_text=False,
        )
        return results[0]["generated_text"]
# Example usage for the editor:
if __name__ == "__main__":
zew_model = ZewAI3()
test_prompt = "Create a single-file HTML app with a dark mode toggle."
print(zew_model.generate_code(test_prompt))