# Godot-ai / app.py
# Hugging Face Space by OrangyDev (commit 9d85df1)
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
# 1. Model identifiers.
userxd = "OrangyDev"
model_id = f"{userxd}/godot4-expert-ai"

# 2. Load tokenizer and model from the Hub.
tokenizer = AutoTokenizer.from_pretrained(model_id)

# Use float16 on GPU, float32 on CPU; device_map="auto" lets
# transformers/accelerate place the weights automatically.
_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=_dtype,
    device_map="auto",
)
# 3. CHAT FUNCTION
def chat_godot(message, history):
    """Generate a reply from the fine-tuned Godot 4 model.

    Parameters:
        message: the user's latest chat message (str).
        history: prior turns supplied by gr.ChatInterface; currently unused —
            every reply is generated from the last message alone.

    Returns:
        The model's response text, with the prompt scaffolding stripped.
    """
    # Prompt format must match the one used during fine-tuning.
    prompt = f"### User: {message}\n### Assistant:"
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():
        output = model.generate(
            **inputs,
            max_new_tokens=150,
            temperature=0.7,
            do_sample=True,
            repetition_penalty=1.2,
            eos_token_id=tokenizer.eos_token_id,
            # Fix: without an explicit pad_token_id, generate() warns and may
            # fall back to an undefined pad id on models that define no pad
            # token; padding with EOS is the standard remedy.
            pad_token_id=tokenizer.eos_token_id,
        )
    full_text = tokenizer.decode(output[0], skip_special_tokens=True)
    # Keep only the assistant's part of the decoded text; [-1] also handles
    # the (unexpected) case where the marker is absent.
    response = full_text.split("### Assistant:")[-1].strip()
    return response
# 4. GRADIO INTERFACE
demo = gr.ChatInterface(
    fn=chat_godot,
    title="Godot 4 Expert AI",
    description=f"IA entrenada por {userxd} para resolver dudas de Godot 4 y GDScript.",
    # Fix: example strings were mojibake (UTF-8 decoded as Latin-1,
    # e.g. "ΒΏQuiΓ©n"); restored to proper UTF-8 Spanish.
    examples=["¿Quién es tu creador?", "¿Quién es Rafa Laguna?", "¿Cómo muevo un personaje en Godot 4?"],
    theme="soft"
)

# Launch the web app only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()