import os
import sys
# --- Environment configuration (must be set BEFORE `app` is imported) ---
os.environ["GRADIO_MAX_FILE_SIZE"] = "100mb"  # cap upload size accepted by Gradio
os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"  # enable hf_transfer-accelerated Hub downloads
os.environ["OMP_NUM_THREADS"] = "1"  # restrict OpenMP to one thread (avoid CPU oversubscription)
# Dedicated temp directory for Gradio uploads (optional)
temp_dir = os.path.join(os.getcwd(), "temp_uploads")
os.makedirs(temp_dir, exist_ok=True)
os.environ["GRADIO_TEMP_DIR"] = temp_dir
os.environ["GRADIO_SERVER_TIMEOUT"] = "300"  # 5-minute timeout
# Make this script's directory importable, then pull in the app entry points
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from app import main_ui, load_txt2img, load_img2img  # IMPORTANT: import the functions, not just the module
if __name__ == "__main__":
    # Build the UI first so startup cost is visible before model loading begins.
    demo = main_ui()

    # ---- Pre-load models (done here, after the UI is built) ----
    # Return values are intentionally discarded: the load_* helpers appear to
    # cache their pipelines internally (presumably — TODO confirm in app.py).
    print("🚀 Pre-loading models for faster first response...")
    try:
        load_txt2img("runwayml/stable-diffusion-v1-5")
        print("✅ Base model loaded.")
        load_img2img(keep_environment=True)
        load_img2img(keep_environment=False)
        print("✅ ControlNet pipelines loaded.")
    except Exception as e:
        # Best-effort: a pre-load failure is non-fatal; models can still be
        # loaded on first request (presumably lazily — verify against app.py).
        print(f"⚠️ Some models could not be pre-loaded: {e}")

    # Start the Gradio server: bounded request queue, LAN-reachable, no share link.
    demo.queue(max_size=3).launch(
        server_name="0.0.0.0",
        server_port=7860,
        max_file_size="100mb",
        ssl_verify=False,
        share=False,
    )