# NOTE: removed web-page scrape residue ("Spaces: Paused") that is not part of the program.
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Correct model repo id on Hugging Face. The previous value ("LYRA") is not a
# valid namespaced repo id and from_pretrained() would fail to resolve it; the
# UI below advertises DeepSeek Coder 1.3B, so load that instruct checkpoint.
model_name = "deepseek-ai/deepseek-coder-1.3b-instruct"

print("Cargando modelo... Esto puede tomar unos minutos la primera vez.")

# Load the tokenizer (trust_remote_code in case the repo ships custom code).
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)

# Load the model with optimizations for limited resources.
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,  # half precision to save memory
    device_map="auto",          # place weights automatically (GPU if available)
    trust_remote_code=True,
    low_cpu_mem_usage=True,
)

print("¡Modelo cargado exitosamente!")
def generate_response(prompt, max_length=200, temperature=0.7):
    """Generate a completion for *prompt* with the loaded DeepSeek Coder model.

    Args:
        prompt: User question or code snippet to respond to.
        max_length: Maximum number of NEW tokens to generate (same cap as the
            original ``len(input) + max_length`` formulation).
        temperature: Sampling temperature; higher values are more creative.

    Returns:
        The generated response text with the prompt scaffolding stripped.
    """
    # Format the prompt with DeepSeek Coder's instruction template.
    formatted_prompt = f"### Instruction:\n{prompt}\n\n### Response:\n"

    # Tokenize AND move tensors to the model's device: with device_map="auto"
    # the model may sit on GPU while a plain .encode() leaves tensors on CPU,
    # which would crash generate(). tokenizer(...) also yields attention_mask,
    # needed because pad_token_id == eos_token_id below.
    inputs = tokenizer(formatted_prompt, return_tensors="pt").to(model.device)

    with torch.no_grad():
        outputs = model.generate(
            **inputs,  # input_ids + attention_mask
            max_new_tokens=max_length,  # equivalent to old len(input) + max_length cap
            temperature=temperature,
            do_sample=True,
            pad_token_id=tokenizer.eos_token_id,
            eos_token_id=tokenizer.eos_token_id,
            repetition_penalty=1.1,
        )

    # Decode only the newly generated tokens so the prompt can never leak back
    # into the answer (more robust than splitting on "### Response:\n").
    generated_tokens = outputs[0][inputs["input_ids"].shape[1]:]
    response = tokenizer.decode(generated_tokens, skip_special_tokens=True)
    return response.strip()
# Gradio interface. All user-facing strings below were mojibake (UTF-8 bytes
# decoded with the wrong codec, e.g. "贸" for "ó", "馃殌" for "🚀");
# they are restored here to proper UTF-8 — the visible text is the fix.
interface = gr.Interface(
    fn=generate_response,
    inputs=[
        gr.Textbox(
            label="Prompt",
            placeholder="Escribe tu pregunta de programación o código...",
            lines=3,
        ),
        gr.Slider(
            minimum=50,
            maximum=500,
            value=200,
            label="Longitud máxima de respuesta",
        ),
        gr.Slider(
            minimum=0.1,
            maximum=2.0,
            value=0.7,
            step=0.1,
            label="Temperatura (creatividad)",
        ),
    ],
    outputs=gr.Textbox(label="Respuesta del DeepSeek Coder", lines=10),
    title="🚀 DeepSeek Coder 1.3B",
    description=(
        "Modelo de programación DeepSeek ejecutándose en Hugging Face Spaces. "
        "Perfecto para ayuda con código, explicaciones y debugging."
    ),
    examples=[
        ["Escribe una función en Python para calcular fibonacci"],
        ["¿Cómo puedo hacer una API REST con FastAPI?"],
        ["Explica qué hace este código: for i in range(10): print(i**2)"],
        ["Crea una función JavaScript para validar emails"],
    ],
)

# Launch the app only when executed directly (Spaces runs this as a script).
if __name__ == "__main__":
    interface.launch()