"""Main entrypoint for the Stable Diffusion application.
This module initializes the text-to-image and image-to-image pipelines,
sets up the UI, and launches the Gradio interface.
"""
from __future__ import annotations
import os
import sys
# Make the local "src" package importable when the app is launched from the
# repository root — the layout used by Hugging Face Spaces.
sys.path.append(os.path.abspath("src"))
import torch
from dotenv import load_dotenv
from sdgen.config import AppSettings
from sdgen.sd import load_pipeline, prepare_img2img_pipeline, warmup_pipeline
from sdgen.ui import build_ui
from sdgen.utils.logger import get_logger
from sdgen.utils.lora_downloader import ensure_loras
# Module-level logger named after this module, per project convention.
logger = get_logger(__name__)
# Load environment variables from a local .env file, if one exists.
load_dotenv()
def detect_device() -> str:
    """Select the torch device to run inference on.

    Returns:
        ``"cuda"`` when torch can see a GPU, otherwise ``"cpu"``.
    """
    gpu_available = torch.cuda.is_available()
    if not gpu_available:
        logger.warning("CUDA not detected → falling back to CPU")
        return "cpu"
    logger.info("CUDA available → using GPU")
    return "cuda"
def main() -> None:
    """Start the Stable Diffusion UI and initialize inference pipelines.

    Loads both configured checkpoints, derives matching img2img
    pipelines, optionally warms up the Turbo pipeline on GPU, and
    launches the Gradio interface.
    """
    settings = AppSettings()
    # Use the detected device. This was previously hardcoded to "cpu",
    # which left detect_device() unused and made the fp16/warmup branches
    # below unreachable; on GPU-less hosts detect_device() still yields
    # "cpu", so CPU-only deployments behave exactly as before.
    device = detect_device()

    # Best-effort LoRA download at startup: a network/auth failure here
    # should not prevent the app from serving the base models.
    try:
        ensure_loras()
    except Exception as exc:
        logger.warning("LoRA download issue: %s", exc)

    use_fp16 = device == "cuda"
    model_ids = {
        "SD1.5": settings.model_id1,
        "Turbo": settings.model_id2,
    }
    pipes = {}
    for label, model_id in model_ids.items():
        # Log each checkpoint (the original only logged the first one).
        logger.info("Loading pipeline %s", model_id)
        pipes[label] = load_pipeline(
            model_id=model_id,
            device=device,
            use_fp16=use_fp16,
            enable_xformers=settings.enable_xformers,
        )

    # Warm up only on GPU; a CPU warmup run is slow and adds no benefit.
    if device == "cuda" and settings.warmup:
        warmup_pipeline(pipes["Turbo"])

    # Wrap each text-to-image pipeline for image-to-image use.
    img2img_pipes = {
        label: prepare_img2img_pipeline(pipe) for label, pipe in pipes.items()
    }

    demo = build_ui(pipes, img2img_pipes)
    demo.launch(
        server_name=settings.server_host,
        server_port=settings.server_port,
        share=settings.share,
    )
# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()