Spaces:
Build error
Build error
| import gradio as gr | |
| from transformers import AutoModelForCausalLM, AutoTokenizer | |
| import torch | |
# Load the model and tokenizer once at module import time (Spaces keeps the
# process warm, so this cost is paid only on startup).
model_name = "distilgpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
# GPT-2 ships without a pad token; generate() needs one for padding and will
# warn (or fail when batching) otherwise. EOS is the standard stand-in.
tokenizer.pad_token = tokenizer.eos_token
# Inference only — disables dropout so sampling is the only source of randomness.
model.eval()
def generate_response(prompt, max_length=100):
    """Generate a DistilGPT-2 continuation of *prompt*.

    Args:
        prompt: Conversation text the model should continue.
        max_length: Maximum number of NEW tokens to sample. (Interpreted as
            ``max_new_tokens``: the original passed it as ``max_length``,
            which counts the prompt too, so long conversation histories
            silently left no room to generate anything.)

    Returns:
        Only the newly generated text, whitespace-trimmed. The prompt is
        NOT echoed back — decoding ``outputs[0]`` whole would include it.
    """
    inputs = tokenizer.encode(prompt, return_tensors="pt")
    with torch.no_grad():
        outputs = model.generate(
            inputs,
            # Counts only generated tokens, so a long prompt can no longer
            # exhaust the budget before generation starts.
            max_new_tokens=max_length,
            num_return_sequences=1,
            temperature=0.7,
            top_p=0.9,
            do_sample=True,
            # GPT-2 has no pad token; use EOS to avoid the generate() warning.
            pad_token_id=tokenizer.eos_token_id,
        )
    # Slice off the prompt tokens: outputs[0] = prompt ids + generated ids.
    generated_ids = outputs[0][inputs.shape[1]:]
    response = tokenizer.decode(generated_ids, skip_special_tokens=True)
    return response.strip()
def chatbot(message, history):
    """Handle one chat turn: build the prompt, generate, update history.

    Args:
        message: The user's new message.
        history: List of ``(human, ai)`` tuples from previous turns, or
            ``None``/empty on the first call (Gradio state starts as None).

    Returns:
        ``(history, history)`` — the same list twice, once for the chatbot
        display output and once for the session-state output.
    """
    history = history or []
    # Rebuild the whole conversational prompt from scratch every turn.
    prompt = "Eres un asistente AI amigable y útil. Responde de manera concisa y coherente.\n\n"
    for human, ai in history:
        prompt += f"Human: {human}\nAI: {ai}\n"
    prompt += f"Human: {message}\nAI:"
    response = generate_response(prompt)
    # Defensive cleanup — correct whether generate_response returns only the
    # continuation or (as the original did) echoes the full prompt back:
    # 1) drop the echoed prompt prefix if present (no-op otherwise);
    if response.startswith(prompt):
        response = response[len(prompt):]
    # 2) a free-running LM tends to invent further "Human:" turns — keep only
    #    the assistant's first reply so history is not polluted.
    response = response.split("Human:")[0].strip()
    history.append((message, response))
    return history, history
# Wire the chat handler into a Gradio UI. The extra "state" input/output
# pair is how classic gr.Interface threads the conversation history through
# successive calls to chatbot().
chat_inputs = ["text", "state"]
chat_outputs = ["chatbot", "state"]

iface = gr.Interface(
    fn=chatbot,
    inputs=chat_inputs,
    outputs=chat_outputs,
    title="Tu Compañero AI con DistilGPT-2",
    description="Un chatbot de IA utilizando el modelo DistilGPT-2 para conversaciones simples.",
)

# Start the web server (blocks until the app is shut down).
iface.launch()