Spaces:
Sleeping
Sleeping
File size: 1,524 Bytes
e7b3e27 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 |
import os
import random
from datetime import datetime
from typing import Optional
from huggingface_hub import InferenceClient
# Directory where generated images are saved (created eagerly at import time).
OUTPUT_DIR = "generated_images"
os.makedirs(OUTPUT_DIR, exist_ok=True)
# Inference client ("same as in Sofia Rivera" per the original note).
# No token/model is passed here, so auth comes from the ambient HF environment
# (e.g. HF_TOKEN) — NOTE(review): confirm credentials are configured in deployment.
client = InferenceClient()
def generate_image_from_prompt(
    prompt: str,
    negative_prompt: str = "",
    model_name: str = "black-forest-labs/FLUX.1-dev",
    seed: Optional[int] = None,
) -> tuple[Optional[str], str]:
    """
    Generate an image with Hugging Face ``InferenceClient.text_to_image``
    and save it under ``OUTPUT_DIR``.

    Args:
        prompt: Text description of the desired image.
        negative_prompt: Features the model should avoid.
        model_name: Hugging Face model id used for generation.
        seed: RNG seed for reproducibility; a random 31-bit seed is drawn
            when ``None``.

    Returns:
        ``(image_path, status_message)``. On failure ``image_path`` is
        ``None`` and ``status_message`` describes the error.
    """
    try:
        if seed is None:
            # Random 31-bit seed so repeated calls differ by default.
            seed = random.randint(0, 2_147_483_647)
        image = client.text_to_image(
            prompt=prompt,
            negative_prompt=negative_prompt,
            model=model_name,
            guidance_scale=7.5,
            num_inference_steps=50,
            # BUG FIX: the seed was previously generated but never passed
            # to the API, so the reported seed had no effect on the output.
            seed=seed,
        )
        # Timestamp + seed make the filename unique per call.
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"sofia_{timestamp}_{seed}.png"
        file_path = os.path.join(OUTPUT_DIR, filename)
        image.save(file_path)
        # BUG FIX: the saved path was missing from the status message
        # (the original literal contained a garbled "(unknown)" placeholder).
        status = (
            f"✅ Imagen generada y guardada: {file_path}\n"
            f"Modelo: {model_name}\nSeed: {seed}"
        )
        return file_path, status
    except Exception as e:
        # Broad catch is deliberate: any failure (network, auth, model load)
        # is surfaced to the caller as a status string instead of raising.
        error_msg = f"❌ Error al generar imagen: {str(e)}"
        return None, error_msg
|