# NOTE(review): removed stray paste residue ("Spaces: Sleeping") that preceded
# the module header; it was not Python and broke parsing.
| # modelblob.py | |
import html
import json
import os
import pathlib

from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse, JSONResponse
router = APIRouter()

# On-disk location where the blob is persisted; override via the
# MODEL_BLOB_PATH environment variable (read once at import time).
LOCAL_BLOB_PATH = os.getenv("MODEL_BLOB_PATH", "/tmp/model_blob.json")
# ---- EMBEDDED MODEL BLOB (exactly as provided) ----
# Verbatim Vertex AI Model Garden deployment spec for the Hugging Face model
# misri/realismEngineSDXL_v30VAE. Kept byte-for-byte as supplied (note the
# single-deploy spec duplicates the first multiDeployVertex entry) and used
# as the fallback whenever LOCAL_BLOB_PATH has not been written yet.
EMBEDDED_MODEL_BLOB = {
    "name": "publishers/hf-misri/models/realismenginesdxl_v30vae",
    "versionId": "001",
    "openSourceCategory": "THIRD_PARTY_OWNED_OSS",
    "supportedActions": {
        # Default single-target deployment: 1x H100 on a3-highgpu-1g.
        "deploy": {
            "modelDisplayName": "misri/realismEngineSDXL_v30VAE",
            "containerSpec": {
                "imageUri": "us-docker.pkg.dev/deeplearning-platform-release/vertex-model-garden/hf-inference-toolkit.cu125.0-1.ubuntu2204.py311:model-garden.hf-inference-toolkit-0-1-release_20250927.00_p0",
                "env": [
                    {"name": "HF_TASK", "value": "text-to-image"},
                    {"name": "MODEL_ID", "value": "misri/realismEngineSDXL_v30VAE"},
                    {"name": "HF_MODEL_ID", "value": "misri/realismEngineSDXL_v30VAE"},
                    {"name": "HF_REVISION", "value": "7d2f2de544b4aa26148b3a16b3469ed6dbb38a5c"},
                    {"name": "DEPLOY_SOURCE", "value": "UI_HF_VERIFIED_MODEL"}
                ],
                "ports": [{"containerPort": 8080}]
            },
            "dedicatedResources": {
                "machineSpec": {
                    "machineType": "a3-highgpu-1g",
                    "acceleratorType": "NVIDIA_H100_80GB",
                    "acceleratorCount": 1
                },
                "maxReplicaCount": 1
            },
            "deployTaskName": "1 NVIDIA_H100_80GB a3-highgpu-1g",
            "deployMetadata": {}
        },
        # Alternative deployment targets: H100 (same as above) or L4.
        "multiDeployVertex": {
            "multiDeployVertex": [
                {
                    "modelDisplayName": "misri/realismEngineSDXL_v30VAE",
                    "containerSpec": {
                        "imageUri": "us-docker.pkg.dev/deeplearning-platform-release/vertex-model-garden/hf-inference-toolkit.cu125.0-1.ubuntu2204.py311:model-garden.hf-inference-toolkit-0-1-release_20250927.00_p0",
                        "env": [
                            {"name": "HF_TASK", "value": "text-to-image"},
                            {"name": "MODEL_ID", "value": "misri/realismEngineSDXL_v30VAE"},
                            {"name": "HF_MODEL_ID", "value": "misri/realismEngineSDXL_v30VAE"},
                            {"name": "HF_REVISION", "value": "7d2f2de544b4aa26148b3a16b3469ed6dbb38a5c"},
                            {"name": "DEPLOY_SOURCE", "value": "UI_HF_VERIFIED_MODEL"}
                        ],
                        "ports": [{"containerPort": 8080}]
                    },
                    "dedicatedResources": {
                        "machineSpec": {
                            "machineType": "a3-highgpu-1g",
                            "acceleratorType": "NVIDIA_H100_80GB",
                            "acceleratorCount": 1
                        },
                        "maxReplicaCount": 1
                    },
                    "deployTaskName": "1 NVIDIA_H100_80GB a3-highgpu-1g",
                    "deployMetadata": {}
                },
                {
                    "modelDisplayName": "misri/realismEngineSDXL_v30VAE",
                    "containerSpec": {
                        "imageUri": "us-docker.pkg.dev/deeplearning-platform-release/vertex-model-garden/hf-inference-toolkit.cu125.0-1.ubuntu2204.py311:model-garden.hf-inference-toolkit-0-1-release_20250927.00_p0",
                        "env": [
                            {"name": "HF_TASK", "value": "text-to-image"},
                            {"name": "MODEL_ID", "value": "misri/realismEngineSDXL_v30VAE"},
                            {"name": "HF_MODEL_ID", "value": "misri/realismEngineSDXL_v30VAE"},
                            {"name": "HF_REVISION", "value": "7d2f2de544b4aa26148b3a16b3469ed6dbb38a5c"},
                            {"name": "DEPLOY_SOURCE", "value": "UI_HF_VERIFIED_MODEL"}
                        ],
                        "ports": [{"containerPort": 8080}]
                    },
                    "dedicatedResources": {
                        "machineSpec": {
                            "machineType": "g2-standard-12",
                            "acceleratorType": "NVIDIA_L4",
                            "acceleratorCount": 1
                        },
                        "maxReplicaCount": 1
                    },
                    "deployTaskName": "1 NVIDIA_L4 g2-standard-12",
                    "deployMetadata": {}
                }
            ]
        }
    }
}
# ---------------------------------------------------
| def _ensure_dir(p: str): | |
| pathlib.Path(p).parent.mkdir(parents=True, exist_ok=True) | |
| def _pretty(obj) -> str: | |
| try: | |
| return json.dumps(obj, indent=2) | |
| except Exception: | |
| return str(obj) | |
def view_model_blob():
    """
    Render the current model blob as an HTML page.

    If LOCAL_BLOB_PATH exists its contents are shown (pretty-printed when
    the file holds valid JSON, raw otherwise); else the embedded blob is
    shown. Returns an HTMLResponse.
    """
    try:
        if os.path.exists(LOCAL_BLOB_PATH):
            raw = pathlib.Path(LOCAL_BLOB_PATH).read_text(encoding="utf-8")
            try:
                # Round-trip through json to normalize indentation.
                disp = json.dumps(json.loads(raw), indent=2)
            except json.JSONDecodeError:
                disp = raw
            source = f"File • {LOCAL_BLOB_PATH}"
        else:
            disp = _pretty(EMBEDDED_MODEL_BLOB)
            source = "Embedded (not yet written to file)"
    except Exception as e:
        disp = _pretty({"error": str(e)})
        source = "Error"
    # FIX: escape before interpolating into markup so blob content (or an
    # error message) containing <, > or & cannot break the <pre> block or
    # inject script into the page.
    disp = html.escape(disp)
    source = html.escape(source)
    # Local renamed from `html` to `page` to avoid shadowing the html module.
    page = f"""
<!doctype html>
<html>
<head>
<meta charset="utf-8"/>
<title>Model Blob</title>
<style>
body {{ font-family: ui-monospace, Menlo, Consolas, monospace; background:#0d1117; color:#c9d1d9; margin:24px; }}
pre {{ background:#161b22; padding:16px; border-radius:8px; overflow:auto; white-space:pre-wrap; }}
.row {{ max-width: 1000px; margin:auto; }}
a {{ color:#58a6ff; text-decoration:none; }}
.btn {{ display:inline-block; padding:8px 12px; border-radius:6px; background:#238636; color:#fff; margin-right:8px; }}
.btn.secondary {{ background:#444; }}
</style>
</head>
<body>
<div class="row">
<h2>Current Model Blob <small style="font-size:12px; color:#8b949e;">({source})</small></h2>
<div style="margin:10px 0;">
<a class="btn" href="/modelblob/write">Write to /tmp/model_blob.json</a>
<a class="btn secondary" href="/">Back</a>
</div>
<pre>{disp}</pre>
</div>
</body>
</html>
"""
    return HTMLResponse(page)
def write_model_blob():
    """Persist the embedded blob verbatim to LOCAL_BLOB_PATH.

    Returns a JSON payload containing the target path on success, or a
    500 response carrying the error message on failure.
    """
    try:
        _ensure_dir(LOCAL_BLOB_PATH)
        blob_text = _pretty(EMBEDDED_MODEL_BLOB)
        pathlib.Path(LOCAL_BLOB_PATH).write_text(blob_text, encoding="utf-8")
        return JSONResponse({"ok": True, "path": LOCAL_BLOB_PATH})
    except Exception as e:
        return JSONResponse({"error": str(e)}, 500)
async def overwrite_blob(req: Request):
    """
    Overwrite LOCAL_BLOB_PATH with a posted blob.

    Accepts either a JSON request body (the blob itself as a dict/list, or
    a raw string) or a form field named 'blob'. Returns 400 when no usable
    blob is supplied and 500 on unexpected errors.
    """
    try:
        ctype = req.headers.get("content-type", "")
        if "application/json" in ctype:
            data = await req.json()
            # FIX: the old code called data.get("blob") whenever data was
            # not a dict/list, which raised AttributeError (-> 500) for
            # string/number/bool bodies. A raw string body is now accepted;
            # other scalars fall through to the 400 below.
            txt = data if isinstance(data, (dict, list, str)) else None
        else:
            form = await req.form()
            txt = form.get("blob")
        if isinstance(txt, (dict, list)):
            out = json.dumps(txt, indent=2)
        elif isinstance(txt, str) and txt.strip():
            out = txt
        else:
            return JSONResponse({"error": "Missing valid blob"}, 400)
        _ensure_dir(LOCAL_BLOB_PATH)
        pathlib.Path(LOCAL_BLOB_PATH).write_text(out, encoding="utf-8")
        return JSONResponse({"ok": True, "path": LOCAL_BLOB_PATH})
    except Exception as e:
        return JSONResponse({"error": str(e)}, 500)
# ---------------------------------------------------------------------
# JSON outlet for backend ingestion
# ---------------------------------------------------------------------
def modelblob_json():
    """Return the current model blob as a dict for backend ingestion.

    Reads LOCAL_BLOB_PATH when it exists and contains valid JSON;
    otherwise (file missing, unreadable, or malformed) falls back to the
    embedded blob rather than failing the caller.
    """
    # FIX: use the module-level LOCAL_BLOB_PATH constant like every other
    # handler in this file instead of re-resolving MODEL_BLOB_PATH here,
    # and drop the redundant function-local re-imports of json/os/pathlib
    # (already imported at module top).
    try:
        if os.path.exists(LOCAL_BLOB_PATH):
            return json.loads(
                pathlib.Path(LOCAL_BLOB_PATH).read_text(encoding="utf-8")
            )
    except Exception:
        # Deliberate best-effort: fall through to the embedded copy.
        pass
    return EMBEDDED_MODEL_BLOB