import json
import uuid
import urllib.request
import urllib.parse
import urllib.error
import os
import random
import time
import shutil
import asyncio
import requests
import httpx
from typing import List, Dict, Any, Optional
from threading import Lock
from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect, UploadFile, File
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse, Response
from pydantic import BaseModel
from fastapi.middleware.cors import CORSMiddleware
app = FastAPI()
# Allow cross-origin requests from any origin. No credentials are enabled,
# so the wildcard origin is acceptable here.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
# --- WebSocket state manager ---
class ConnectionManager:
    """Tracks open WebSocket connections and pushes events to them."""

    def __init__(self):
        # All open sockets, plus an optional mapping from a caller-supplied
        # client_id to its socket for targeted messages.
        self.active_connections: List[WebSocket] = []
        self.user_connections: Dict[str, WebSocket] = {}

    async def connect(self, websocket: WebSocket, client_id: str = None):
        """Accept a new socket, register it, and broadcast the new count."""
        await websocket.accept()
        self.active_connections.append(websocket)
        if client_id:
            self.user_connections[client_id] = websocket
        print(f"WS Connected. Total: {len(self.active_connections)}")
        await self.broadcast_count()

    async def disconnect(self, websocket: WebSocket, client_id: str = None):
        """Unregister a socket and broadcast the new count."""
        if websocket in self.active_connections:
            self.active_connections.remove(websocket)
        if client_id and client_id in self.user_connections:
            del self.user_connections[client_id]
        print(f"WS Disconnected. Total: {len(self.active_connections)}")
        await self.broadcast_count()

    async def send_personal_message(self, message: dict, client_id: str):
        """Send a JSON message to one client; drop the socket on failure."""
        if client_id in self.user_connections:
            try:
                await self.user_connections[client_id].send_text(json.dumps(message))
            except Exception as e:
                print(f"WS Send Error ({client_id}): {e}")
                # BUG FIX: disconnect() is a coroutine; without "await" it was
                # never executed, so the dead socket stayed registered.
                await self.disconnect(self.user_connections[client_id], client_id)

    async def broadcast_count(self):
        """Broadcast the current online-connection count to every client."""
        count = len(self.active_connections)
        data = json.dumps({"type": "stats", "online_count": count})
        print(f"Broadcasting online count: {count}")
        # Iterate over a copy so dead sockets can be removed while looping.
        for connection in self.active_connections[:]:
            try:
                await connection.send_text(data)
            except Exception as e:
                print(f"Broadcast error for client {id(connection)}: {e}")
                self.active_connections.remove(connection)

    async def broadcast_new_image(self, image_data: dict):
        """Broadcast a newly generated image record to every client."""
        data = json.dumps({"type": "new_image", "data": image_data})
        print(f"Broadcasting new image to {len(self.active_connections)} clients")
        for connection in self.active_connections[:]:
            try:
                await connection.send_text(data)
            except Exception as e:
                print(f"Broadcast image error for client {id(connection)}: {e}")
                self.active_connections.remove(connection)
# Singleton connection manager shared by all endpoints.
manager = ConnectionManager()
# Reference to the main asyncio event loop; lets synchronous handlers
# (e.g. /api/generate, which runs in a worker thread) schedule coroutines.
GLOBAL_LOOP = None

@app.on_event("startup")
async def startup_event():
    """Capture the running event loop once the app starts."""
    global GLOBAL_LOOP
    GLOBAL_LOOP = asyncio.get_running_loop()
@app.websocket("/ws/stats")
async def websocket_endpoint(websocket: WebSocket, client_id: str = None):
    """Online-count WebSocket: registers the socket and answers heartbeats."""
    await manager.connect(websocket, client_id)
    try:
        # Heartbeat loop: reply "pong" to every "ping" from the client.
        while True:
            message = await websocket.receive_text()
            if message != "ping":
                continue
            await websocket.send_text(json.dumps({"type": "pong"}))
    except WebSocketDisconnect:
        print(f"WebSocket disconnected normally: {id(websocket)}")
        await manager.disconnect(websocket, client_id)
    except Exception as e:
        print(f"WS Error for {id(websocket)}: {e}")
        await manager.disconnect(websocket, client_id)
# --- Configuration ---
# Multi-GPU load balancing: list of ComfyUI backend addresses.
COMFYUI_INSTANCES = [
    "127.0.0.1:8188",  # default local port
    "127.0.0.1:4090",  # secondary GPU port
]
# Backward compatibility: default to the first instance.
COMFYUI_ADDRESS = COMFYUI_INSTANCES[0]
CLIENT_ID = str(uuid.uuid4())
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
WORKFLOW_DIR = os.path.join(BASE_DIR, "workflows")
WORKFLOW_PATH = os.path.join(WORKFLOW_DIR, "Z-Image.json")
STATIC_DIR = os.path.join(BASE_DIR, "static")
OUTPUT_DIR = os.path.join(BASE_DIR, "output")
HISTORY_FILE = os.path.join(BASE_DIR, "history.json")
# In-memory FIFO of pending /api/generate tasks (for queue-position queries).
QUEUE = []
QUEUE_LOCK = Lock()
HISTORY_LOCK = Lock()
# Global execution lock removed so requests can run concurrently on
# different GPUs.
# EXECUTION_LOCK = Lock()
NEXT_TASK_ID = 1
# Load balancing: local task counters (work around the race caused by
# ComfyUI's delayed queue updates).
BACKEND_LOCAL_LOAD = {addr: 0 for addr in COMFYUI_INSTANCES}
LOAD_LOCK = Lock()
# Make sure required directories exist.
os.makedirs(OUTPUT_DIR, exist_ok=True)
os.makedirs(STATIC_DIR, exist_ok=True)
os.makedirs(WORKFLOW_DIR, exist_ok=True)
GLOBAL_CONFIG_FILE = os.path.join(BASE_DIR, "global_config.json")
GLOBAL_CONFIG_LOCK = Lock()
# Serve frontend assets and locally stored generated images.
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/output", StaticFiles(directory=OUTPUT_DIR), name="output")
class GenerateRequest(BaseModel):
    # Request body for /api/generate (local ComfyUI rendering).
    prompt: str = ""
    width: int = 1024
    height: int = 1024
    # Workflow file name looked up under WORKFLOW_DIR.
    workflow_json: str = "Z-Image.json"
    # Per-node input overrides: {node_id: {input_name: value}}.
    params: Dict[str, Any] = {}
    # Record type stored in history (used for filtering).
    type: str = "zimage"
    # Caller's id, used for queue-position lookups.
    client_id: str = ""
class CloudGenRequest(BaseModel):
    # Request body for the ModelScope cloud-generation endpoints.
    prompt: str
    api_key: str
    resolution: str = "1024x1024"
    # WebSocket client to push status updates to (optional).
    client_id: Optional[str] = None
    type: str = "default"
    # Input image URLs / data URIs for edit models.
    image_urls: List[str] = []
    model: str = ""
class DeleteHistoryRequest(BaseModel):
    # Identifies the history record to delete by its timestamp.
    timestamp: float
# --- Load-balancing helpers ---
def get_best_backend():
    """Select the backend whose effective queue pressure is lowest."""
    chosen = COMFYUI_INSTANCES[0]
    lowest = float('inf')
    for addr in COMFYUI_INSTANCES:
        try:
            # Query this backend's ComfyUI queue state.
            with urllib.request.urlopen(f"http://{addr}/queue", timeout=1) as response:
                data = json.loads(response.read())
            # Remote pressure: tasks currently running plus tasks waiting.
            remote_load = len(data.get('queue_running', [])) + len(data.get('queue_pending', []))
            # Locally tracked submissions cover the window before ComfyUI
            # reflects a just-submitted task in its own queue.
            with LOAD_LOCK:
                local_load = BACKEND_LOCAL_LOAD.get(addr, 0)
            # Effective pressure is the larger of the two views.
            effective_load = max(remote_load, local_load)
            print(f"Backend {addr} load: {effective_load} (Remote: {remote_load}, Local: {local_load})")
            if effective_load < lowest:
                lowest, chosen = effective_load, addr
        except Exception as e:
            print(f"Backend {addr} unreachable: {e}")
            continue
    print(f"Selected backend: {chosen}")
    return chosen
# --- Helper functions ---
def download_image(comfy_address, comfy_url_path, prefix="studio_"):
    """Copy a remote ComfyUI image into OUTPUT_DIR.

    Returns a frontend-usable path: "/output/<file>" on success, or a
    proxied "/api/view?..." URL on failure (the raw ComfyUI address is
    127.0.0.1 and unreachable from outside this host).
    """
    filename = f"{prefix}{uuid.uuid4().hex[:10]}.png"
    local_path = os.path.join(OUTPUT_DIR, filename)
    full_url = f"http://{comfy_address}{comfy_url_path}"
    try:
        with urllib.request.urlopen(full_url) as response, open(local_path, 'wb') as out_file:
            shutil.copyfileobj(response, out_file)
        # BUG FIX: the returned path must contain the actual saved file name
        # (the original returned a literal "(unknown)" placeholder).
        return f"/output/{filename}"
    except Exception as e:
        print(f"下载图片失败: {e} (URL: {full_url})")
        # Fall back to proxying through this service: /view?... -> /api/view?...
        if comfy_url_path.startswith("/view"):
            return comfy_url_path.replace("/view", "/api/view", 1)
        return full_url
def save_to_history(record):
    """Prepend a record to the JSON history file (thread-safe).

    Ensures the record has a float timestamp and caps the file at 5000
    records so it cannot grow unbounded.
    """
    with HISTORY_LOCK:
        history = []
        if os.path.exists(HISTORY_FILE):
            try:
                with open(HISTORY_FILE, 'r', encoding='utf-8') as f:
                    history = json.load(f)
            # BUG FIX: bare "except:" also swallowed SystemExit /
            # KeyboardInterrupt; narrow to the actual read/parse failures.
            except (OSError, json.JSONDecodeError):
                # Corrupt or unreadable history: start fresh rather than crash.
                pass
        # Ensure a float timestamp so records can be sorted.
        if "timestamp" not in record:
            record["timestamp"] = time.time()
        history.insert(0, record)
        with open(HISTORY_FILE, 'w', encoding='utf-8') as f:
            json.dump(history[:5000], f, ensure_ascii=False, indent=4)
def get_comfy_history(comfy_address, prompt_id):
    """Fetch the ComfyUI /history entry for a prompt; {} on any failure."""
    url = f"http://{comfy_address}/history/{prompt_id}"
    try:
        with urllib.request.urlopen(url) as response:
            return json.loads(response.read())
    except Exception:
        # Backend unreachable, prompt unknown, or bad payload — no history.
        return {}
# --- API routes ---
@app.get("/api/view")
def view_image(filename: str, type: str = "input", subfolder: str = ""):
    """Proxy an image from the first ComfyUI instance's /view endpoint.

    Used mainly to preview uploaded input images; generated outputs are
    copied into local storage by /api/generate and served from /output,
    so results do not go through this route.
    """
    try:
        upstream = requests.get(
            f"http://{COMFYUI_INSTANCES[0]}/view",
            params={"filename": filename, "type": type, "subfolder": subfolder},
        )
        return Response(content=upstream.content, media_type=upstream.headers.get('Content-Type'))
    except Exception:
        raise HTTPException(status_code=404, detail="Image not found")
@app.post("/api/upload")
async def upload_image(files: List[UploadFile] = File(...)):
    """Mirror every uploaded file to all ComfyUI backends.

    A file counts as uploaded when at least one backend accepted it; the
    response echoes the server-side name ComfyUI assigned.
    """
    # Read each file exactly once so its bytes can be re-sent per backend.
    payloads = [(upload, await upload.read()) for upload in files]
    uploaded_files = []
    for file, content in payloads:
        success_count = 0
        last_result = None
        for addr in COMFYUI_INSTANCES:
            try:
                # Multipart body in the shape ComfyUI's /upload/image expects.
                multipart = {'image': (file.filename, content, file.content_type)}
                response = requests.post(f"http://{addr}/upload/image", files=multipart, timeout=5)
                if response.status_code == 200:
                    last_result = response.json()
                    success_count += 1
                else:
                    print(f"Upload to {addr} failed: {response.text}")
            except Exception as e:
                print(f"Upload error for {addr}: {e}")
        if success_count > 0 and last_result:
            uploaded_files.append({"comfy_name": last_result.get("name", file.filename)})
        else:
            raise HTTPException(status_code=500, detail=f"Failed to upload to any backend")
    return {"files": uploaded_files}
@app.get("/")
async def index():
    """Serve the single-page frontend."""
    html_path = os.path.join(STATIC_DIR, "index.html")
    return FileResponse(html_path)
@app.get("/api/history")
async def get_history_api(type: str = None):
    """Return history records, optionally filtered by type, newest first."""
    if os.path.exists(HISTORY_FILE):
        try:
            with open(HISTORY_FILE, 'r', encoding='utf-8') as f:
                data = json.load(f)
            # Filter by record type.
            if type:
                # A "zimage" request also includes "cloud" records.
                target_types = [type]
                if type == "zimage":
                    target_types.append("cloud")
                data = [item for item in data if item.get("type", "zimage") in target_types]
            # Drop invalid records (no images).
            data = [item for item in data if item.get("images") and len(item["images"]) > 0]
            # Sort server-side to guarantee ordering.
            # Compatibility: old records may carry string timestamps,
            # new ones carry float timestamps.
            def sort_key(item):
                ts = item.get("timestamp", 0)
                if isinstance(ts, (int, float)):
                    return float(ts)
                return 0  # legacy (string-timestamp) records sort last
            data.sort(key=sort_key, reverse=True)
            # Backfill is_cloud: records missing the flag get it when any
            # image filename carries a cloud marker.
            for item in data:
                if "is_cloud" not in item and item.get("images"):
                    # NOTE(review): the "cloud_angle" check is redundant —
                    # it is already implied by the "cloud_" substring check.
                    if any("cloud_angle" in img or "cloud_" in img for img in item["images"]):
                        item["is_cloud"] = True
            return data
        except Exception as e:
            print(f"读取历史文件失败: {e}")
            return []
    return []
@app.get("/api/queue_status")
async def get_queue_status(client_id: str):
    """Report queue length and this client's 1-based position (0 = not queued)."""
    with QUEUE_LOCK:
        total = len(QUEUE)
        position = 0
        # First matching task determines the reported position.
        for idx, task in enumerate(QUEUE, start=1):
            if task["client_id"] == client_id:
                position = idx
                break
    return {"total": total, "position": position}
@app.post("/api/history/delete")
async def delete_history(req: DeleteHistoryRequest):
    """Delete one history record (matched by timestamp) and its image files."""
    if not os.path.exists(HISTORY_FILE):
        return {"success": False, "message": "History file not found"}
    try:
        with HISTORY_LOCK:
            with open(HISTORY_FILE, 'r', encoding='utf-8') as f:
                history = json.load(f)
            # Find and remove the matching record.
            target_record = None
            new_history = []
            for item in history:
                is_match = False
                item_ts = item.get("timestamp", 0)
                # Numeric comparison first (tolerates float rounding)...
                if isinstance(req.timestamp, (int, float)) and isinstance(item_ts, (int, float)):
                    if abs(float(item_ts) - float(req.timestamp)) < 0.001:
                        is_match = True
                # ...then string comparison for legacy records.
                elif str(item_ts) == str(req.timestamp):
                    is_match = True
                if is_match:
                    target_record = item
                else:
                    new_history.append(item)
            if target_record:
                # Persist the pruned history first (atomic-like).
                with open(HISTORY_FILE, 'w', encoding='utf-8') as f:
                    json.dump(new_history, f, ensure_ascii=False, indent=4)
        # Delete image files outside the lock (slow IO operation).
        if target_record:
            for img_url in target_record.get("images", []):
                # img_url is like "/output/filename.png"
                if img_url.startswith("/output/"):
                    filename = img_url.split("/")[-1]
                    file_path = os.path.join(OUTPUT_DIR, filename)
                    if os.path.exists(file_path):
                        try:
                            os.remove(file_path)
                        except Exception as e:
                            print(f"Failed to delete file {file_path}: {e}")
            return {"success": True}
        else:
            return {"success": False, "message": "Record not found"}
    except Exception as e:
        print(f"Delete history error: {e}")
        return {"success": False, "message": str(e)}
class TokenRequest(BaseModel):
    # Body for storing the shared ModelScope API token.
    token: str
@app.get("/api/config/token")
async def get_global_token():
    """Return the stored ModelScope token, or "" if absent/unreadable."""
    if os.path.exists(GLOBAL_CONFIG_FILE):
        try:
            with open(GLOBAL_CONFIG_FILE, 'r', encoding='utf-8') as f:
                config = json.load(f)
            return {"token": config.get("modelscope_token", "")}
        # BUG FIX: bare "except:" also caught SystemExit/KeyboardInterrupt.
        except Exception:
            # Corrupt or unreadable config counts as "no token configured".
            return {"token": ""}
    return {"token": ""}
@app.post("/api/config/token")
async def set_global_token(req: TokenRequest):
    """Store (or overwrite) the shared ModelScope token in the config file."""
    with GLOBAL_CONFIG_LOCK:
        config = {}
        if os.path.exists(GLOBAL_CONFIG_FILE):
            try:
                with open(GLOBAL_CONFIG_FILE, 'r', encoding='utf-8') as f:
                    config = json.load(f)
            # BUG FIX: narrowed from a bare "except:" clause.
            except Exception:
                # Best-effort read: a corrupt file is simply rewritten.
                pass
        config["modelscope_token"] = req.token.strip()
        with open(GLOBAL_CONFIG_FILE, 'w', encoding='utf-8') as f:
            json.dump(config, f, indent=4)
    return {"success": True}
@app.delete("/api/config/token")
async def delete_global_token():
    """Remove the stored ModelScope token; best-effort, always succeeds."""
    with GLOBAL_CONFIG_LOCK:
        if os.path.exists(GLOBAL_CONFIG_FILE):
            try:
                config = {}
                with open(GLOBAL_CONFIG_FILE, 'r', encoding='utf-8') as f:
                    config = json.load(f)
                if "modelscope_token" in config:
                    del config["modelscope_token"]
                    with open(GLOBAL_CONFIG_FILE, 'w', encoding='utf-8') as f:
                        json.dump(config, f, indent=4)
            # BUG FIX: narrowed from a bare "except:" clause.
            except Exception:
                # Best-effort deletion: ignore an unreadable config file.
                pass
    return {"success": True}
class CloudPollRequest(BaseModel):
    # Body for resuming status polling of an existing cloud task.
    task_id: str
    api_key: str
    # WebSocket client to push status updates to (optional).
    client_id: Optional[str] = None
@app.post("/api/angle/poll_status")
async def poll_angle_cloud(req: CloudPollRequest):
    """Resume polling a previously submitted Angle (Qwen-Image-Edit) task.

    Polls ModelScope every 2s for up to 300 attempts (~10 min), pushing
    status updates to the requesting client over WebSocket. On success the
    image is downloaded into OUTPUT_DIR and recorded in history.
    """
    base_url = 'https://api-inference.modelscope.cn/'
    clean_token = req.api_key.strip()
    headers = {
        "Authorization": f"Bearer {clean_token}",
        "Content-Type": "application/json",
        "X-ModelScope-Async-Mode": "true"
    }
    task_id = req.task_id
    print(f"Resuming polling for Angle Task: {task_id}")
    try:
        async with httpx.AsyncClient(timeout=30) as client:
            # Poll status: 300 retries * 2s = 600s.
            for i in range(300):
                await asyncio.sleep(2)
                try:
                    result = await client.get(
                        f"{base_url}v1/tasks/{task_id}",
                        headers={**headers, "X-ModelScope-Task-Type": "image_generation"},
                    )
                    data = result.json()
                    status = data.get("task_status")
                    if status == "SUCCEED":
                        img_url = data["output_images"][0]
                        print(f"Angle Task SUCCEED: {img_url}")
                        if req.client_id:
                            await manager.send_personal_message({
                                "type": "cloud_status",
                                "status": "SUCCEED",
                                "task_id": task_id
                            }, req.client_id)
                        # Download the result into OUTPUT_DIR; fall back to
                        # the remote URL if the download fails.
                        local_path = ""
                        try:
                            async with httpx.AsyncClient() as dl_client:
                                img_res = await dl_client.get(img_url)
                                if img_res.status_code == 200:
                                    filename = f"cloud_angle_{int(time.time())}.png"
                                    file_path = os.path.join(OUTPUT_DIR, filename)
                                    with open(file_path, "wb") as f:
                                        f.write(img_res.content)
                                    # BUG FIX: return the real file name
                                    # (was a literal "(unknown)" placeholder).
                                    local_path = f"/output/{filename}"
                                else:
                                    local_path = img_url
                        except Exception:
                            local_path = img_url
                        record = {
                            "timestamp": time.time(),
                            "prompt": f"Resumed {task_id}",
                            "images": [local_path],
                            "type": "angle"
                        }
                        save_to_history(record)
                        return {"url": local_path}
                    elif status == "FAILED":
                        if req.client_id:
                            await manager.send_personal_message({
                                "type": "cloud_status",
                                "status": "FAILED",
                                "task_id": task_id
                            }, req.client_id)
                        # BUG FIX: raise HTTPException (re-raised below) so a
                        # failed task stops polling instead of retrying until
                        # the 10-minute timeout.
                        raise HTTPException(status_code=400, detail=f"ModelScope task failed: {data}")
                    if i % 5 == 0:
                        # BUG FIX: progress total is 300 iterations, not 150.
                        print(f"Angle Task {task_id} status: {status} ({i}/300)")
                        if req.client_id:
                            await manager.send_personal_message({
                                "type": "cloud_status",
                                "status": f"{status} ({i}/300)",
                                "task_id": task_id,
                                "progress": i,
                                "total": 300
                            }, req.client_id)
                except HTTPException:
                    raise
                except Exception as loop_e:
                    # Transient polling errors: log and keep retrying.
                    print(f"Angle polling error: {loop_e}")
                    continue
            # Still pending after all retries: report timeout (HTTP 200 so the
            # frontend can resume polling later with the task_id).
            print(f"Angle Task Timeout Again: {task_id}")
            if req.client_id:
                await manager.send_personal_message({
                    "type": "cloud_status",
                    "status": "TIMEOUT",
                    "task_id": task_id
                }, req.client_id)
            return {"status": "timeout", "task_id": task_id, "message": "Task still pending"}
    except HTTPException:
        raise
    except Exception as e:
        print(f"Angle polling error: {e}")
        raise HTTPException(status_code=400, detail=str(e))
@app.post("/api/angle/generate")
async def generate_angle_cloud(req: CloudGenRequest):
    """Submit an Angle (Qwen-Image-Edit) task to ModelScope and await the result.

    Logic mirrors test/main.py but uses async httpx. Polls every 2s for up
    to 300 attempts (~10 min), pushing progress to the requesting client
    over WebSocket; on success the image is saved into OUTPUT_DIR.
    """
    base_url = 'https://api-inference.modelscope.cn/'
    clean_token = req.api_key.strip()
    headers = {
        "Authorization": f"Bearer {clean_token}",
        "Content-Type": "application/json",
        "X-ModelScope-Async-Mode": "true"
    }
    # Payload mirrors test/main.py: "image_url" carries the list of input
    # image URLs / data URIs.
    payload = {
        "model": "Qwen/Qwen-Image-Edit-2511",
        "prompt": req.prompt.strip(),
        "image_url": req.image_urls
    }
    print(f"Angle Cloud Request: {payload['model']}, Prompt: {payload['prompt'][:20]}...")
    try:
        async with httpx.AsyncClient(timeout=30) as client:
            # 1. Submit the task.
            submit_res = await client.post(
                f"{base_url}v1/images/generations",
                headers=headers,
                json=payload  # httpx handles JSON serialization
            )
            if submit_res.status_code != 200:
                try:
                    detail = submit_res.json()
                except Exception:
                    detail = submit_res.text
                print(f"Angle Submit Error: {detail}")
                # BUG FIX: re-raised below so the upstream status code is
                # preserved instead of being rewrapped as a generic 400.
                raise HTTPException(status_code=submit_res.status_code, detail=detail)
            task_id = submit_res.json().get("task_id")
            print(f"Angle Task Submitted, ID: {task_id}")
            # Notify the frontend via WebSocket.
            if req.client_id:
                await manager.send_personal_message({
                    "type": "cloud_status",
                    "status": "SUBMITTED",
                    "task_id": task_id,
                    "progress": 0,
                    "total": 300
                }, req.client_id)
            # 2. Poll status (300 retries * 2s = 600s / 10 min).
            for i in range(300):
                await asyncio.sleep(2)
                try:
                    result = await client.get(
                        f"{base_url}v1/tasks/{task_id}",
                        headers={**headers, "X-ModelScope-Task-Type": "image_generation"},
                    )
                    data = result.json()
                    status = data.get("task_status")
                    if status == "SUCCEED":
                        img_url = data["output_images"][0]
                        print(f"Angle Task SUCCEED: {img_url}")
                        if req.client_id:
                            await manager.send_personal_message({
                                "type": "cloud_status",
                                "status": "SUCCEED",
                                "task_id": task_id
                            }, req.client_id)
                        # Download and save locally; fall back to remote URL.
                        local_path = ""
                        try:
                            async with httpx.AsyncClient() as dl_client:
                                img_res = await dl_client.get(img_url)
                                if img_res.status_code == 200:
                                    filename = f"cloud_angle_{int(time.time())}.png"
                                    file_path = os.path.join(OUTPUT_DIR, filename)
                                    with open(file_path, "wb") as f:
                                        f.write(img_res.content)
                                    # BUG FIX: use the real file name (was a
                                    # literal "(unknown)" placeholder).
                                    local_path = f"/output/{filename}"
                                    print(f"Angle Image saved: {local_path}")
                                else:
                                    local_path = img_url
                        except Exception as dl_e:
                            print(f"Download error: {dl_e}")
                            local_path = img_url
                        # Record in local history.
                        record = {
                            "timestamp": time.time(),
                            "prompt": req.prompt,
                            "images": [local_path],
                            "type": "angle",  # distinct record type
                            "is_cloud": True
                        }
                        save_to_history(record)
                        return {"url": local_path}
                    elif status == "FAILED":
                        if req.client_id:
                            await manager.send_personal_message({
                                "type": "cloud_status",
                                "status": "FAILED",
                                "task_id": task_id
                            }, req.client_id)
                        # BUG FIX: raise HTTPException (re-raised below) so a
                        # failed task stops polling instead of retrying until
                        # the 10-minute timeout.
                        raise HTTPException(status_code=400, detail=f"ModelScope task failed: {data}")
                    # Log polling status every 5 iterations (10 seconds).
                    if i % 5 == 0:
                        # BUG FIX: progress total is 300 iterations, not 150.
                        print(f"Angle Task {task_id} status: {status} ({i}/300)")
                        if req.client_id:
                            await manager.send_personal_message({
                                "type": "cloud_status",
                                "status": f"{status} ({i}/300)",
                                "task_id": task_id,
                                "progress": i,
                                "total": 300
                            }, req.client_id)
                except HTTPException:
                    raise
                except Exception as loop_e:
                    # Transient polling errors: log and keep retrying.
                    print(f"Angle polling error (retrying): {loop_e}")
                    continue
            # Timeout: return HTTP 200 with the task_id so the frontend can
            # resume later via /api/angle/poll_status.
            print(f"Angle Task Timeout: {task_id}")
            if req.client_id:
                await manager.send_personal_message({
                    "type": "cloud_status",
                    "status": "TIMEOUT",
                    "task_id": task_id
                }, req.client_id)
            return {"status": "timeout", "task_id": task_id, "message": "Task still pending"}
    except HTTPException:
        raise
    except Exception as e:
        print(f"Angle generation error: {e}")
        raise HTTPException(status_code=400, detail=str(e))
@app.post("/generate")
async def generate_cloud(req: CloudGenRequest):
    """Generate an image via ModelScope's Z-Image-Turbo cloud model.

    Submits an async task, polls every 3s (200 tries, ~10 min), downloads
    the result into OUTPUT_DIR, saves a history record, and broadcasts it
    to all connected WebSocket clients.
    """
    base_url = 'https://api-inference.modelscope.cn/'
    clean_token = req.api_key.strip()
    headers = {
        "Authorization": f"Bearer {clean_token}",
        "Content-Type": "application/json",
    }
    # Official Z-Image standard parameters. (Angle / Qwen-Image-Edit logic
    # moved to /api/angle/generate.)
    payload = {
        "model": "Tongyi-MAI/Z-Image-Turbo",
        "prompt": req.prompt.strip(),
        "size": req.resolution,
        "n": 1
    }
    try:
        async with httpx.AsyncClient(timeout=30) as client:
            # A. Submit the async task.
            print(f"Submitting ModelScope task for prompt: {req.prompt[:20]}...")
            submit_res = await client.post(
                f"{base_url}v1/images/generations",
                headers={**headers, "X-ModelScope-Async-Mode": "true"},
                json=payload
            )
            if submit_res.status_code != 200:
                try:
                    detail = submit_res.json()
                except Exception:
                    detail = submit_res.text
                print(f"ModelScope Submit Error: {detail}")
                # BUG FIX: re-raised below so the upstream status code is
                # preserved instead of being rewrapped as a generic 400.
                raise HTTPException(status_code=submit_res.status_code, detail=detail)
            task_id = submit_res.json().get("task_id")
            print(f"Task submitted, ID: {task_id}")
            # B. Poll task status: 200 tries * 3s = 600s (10 min).
            for i in range(200):
                await asyncio.sleep(3)
                try:
                    result = await client.get(
                        f"{base_url}v1/tasks/{task_id}",
                        headers={**headers, "X-ModelScope-Task-Type": "image_generation"},
                    )
                    data = result.json()
                    status = data.get("task_status")
                    if i % 5 == 0:
                        print(f"Task {task_id} status check {i}: {status}")
                    if status == "SUCCEED":
                        img_url = data["output_images"][0]
                        print(f"Task {task_id} SUCCEED: {img_url}")
                        # Download into the local output dir; fall back to
                        # the remote URL on failure.
                        local_path = ""
                        try:
                            async with httpx.AsyncClient() as dl_client:
                                img_res = await dl_client.get(img_url)
                                if img_res.status_code == 200:
                                    filename = f"cloud_{int(time.time())}.png"
                                    file_path = os.path.join(OUTPUT_DIR, filename)
                                    with open(file_path, "wb") as f:
                                        f.write(img_res.content)
                                    # BUG FIX: use the real file name (was a
                                    # literal "(unknown)" placeholder).
                                    local_path = f"/output/{filename}"
                                    print(f"Image saved locally: {local_path}")
                                else:
                                    print(f"Failed to download image: {img_res.status_code}")
                                    local_path = img_url  # fall back to remote URL
                        except Exception as dl_e:
                            print(f"Download error: {dl_e}")
                            local_path = img_url  # fall back to remote URL
                        # Save a history record with the local path so the
                        # frontend loads it quickly from /output.
                        record = {
                            "timestamp": time.time(),
                            "prompt": req.prompt,
                            "images": [local_path],
                            "type": "cloud"
                        }
                        save_to_history(record)
                        # Broadcast the new image to connected clients.
                        try:
                            await manager.broadcast_new_image(record)
                        except Exception as e:
                            print(f"Broadcast error: {e}")
                        return {"url": local_path}
                    elif status == "FAILED":
                        # BUG FIX: raise HTTPException (re-raised below) so a
                        # failed task stops polling instead of retrying until
                        # the 10-minute timeout.
                        raise HTTPException(status_code=400, detail=f"ModelScope task failed: {data}")
                except HTTPException:
                    raise
                except Exception as loop_e:
                    # Transient polling errors: log and keep retrying.
                    print(f"Polling error (retrying): {loop_e}")
                    continue
            # BUG FIX: the message said 180s, but the loop waits up to 600s.
            raise Exception("Cloud generation timeout (600s)")
    except HTTPException:
        raise
    except Exception as e:
        print(f"Cloud generation error: {e}")
        raise HTTPException(status_code=400, detail=str(e))
@app.post("/api/generate")
def generate(req: GenerateRequest):
    """Render via a local ComfyUI backend (sync handler, so FastAPI runs it
    in a worker thread).

    Enqueues the task for queue-position reporting, picks the least-loaded
    backend, injects parameters into the workflow JSON, submits it, polls
    for completion, copies the resulting images into OUTPUT_DIR, then
    saves and broadcasts the record.
    """
    global NEXT_TASK_ID
    # 1. Enqueue (for /api/queue_status position reporting).
    current_task = None
    target_backend = None
    with QUEUE_LOCK:
        task_id = NEXT_TASK_ID
        NEXT_TASK_ID += 1
        current_task = {"task_id": task_id, "client_id": req.client_id}
        QUEUE.append(current_task)
    try:
        # 2. Load balancing: choose the best backend (the global
        #    EXECUTION_LOCK was removed to allow concurrency across GPUs).
        target_backend = get_best_backend()
        # Bump the local load counter for the chosen backend.
        with LOAD_LOCK:
            BACKEND_LOCAL_LOAD[target_backend] += 1
        # 3. Load the workflow definition.
        workflow_path = os.path.join(WORKFLOW_DIR, req.workflow_json)
        # Compatibility: fall back to WORKFLOW_PATH for the default name.
        if not os.path.exists(workflow_path) and req.workflow_json == "Z-Image.json":
            workflow_path = WORKFLOW_PATH
        if not os.path.exists(workflow_path):
            raise Exception(f"Workflow file not found: {req.workflow_json}")
        with open(workflow_path, 'r', encoding='utf-8') as f:
            workflow = json.load(f)
        seed = random.randint(1, 10**15)
        # Parameter injection.
        # Base parameters (node ids specific to Z-Image.json).
        if "23" in workflow and req.prompt:
            workflow["23"]["inputs"]["text"] = req.prompt
        if "144" in workflow:
            workflow["144"]["inputs"]["width"] = req.width
            workflow["144"]["inputs"]["height"] = req.height
        if "22" in workflow:
            workflow["22"]["inputs"]["seed"] = seed
        # Compatibility with the Flux2-Klein workflow.
        if "158" in workflow:
            workflow["158"]["inputs"]["noise_seed"] = seed
        for node_id in ["146", "181"]:
            if node_id in workflow and "inputs" in workflow[node_id] and "seed" in workflow[node_id]["inputs"]:
                workflow[node_id]["inputs"]["seed"] = seed
        if "184" in workflow and "inputs" in workflow["184"] and "seed" in workflow["184"]["inputs"]:
            workflow["184"]["inputs"]["seed"] = seed
        if "172" in workflow and "inputs" in workflow["172"] and "seed" in workflow["172"]["inputs"]:
            # SeedVR2VideoUpscaler caps seed at 2^32 - 1.
            workflow["172"]["inputs"]["seed"] = seed % 4294967295
        if "14" in workflow and "inputs" in workflow["14"] and "seed" in workflow["14"]["inputs"]:
            workflow["14"]["inputs"]["seed"] = seed
        # Dynamic parameter injection (works for any workflow).
        for node_id, node_inputs in req.params.items():
            if node_id in workflow:
                if "inputs" not in workflow[node_id]:
                    workflow[node_id]["inputs"] = {}
                for input_name, value in node_inputs.items():
                    workflow[node_id]["inputs"][input_name] = value
        # Submit the prompt to the chosen ComfyUI backend.
        p = {"prompt": workflow, "client_id": CLIENT_ID}
        data = json.dumps(p).encode('utf-8')
        try:
            post_req = urllib.request.Request(f"http://{target_backend}/prompt", data=data)
            prompt_id = json.loads(urllib.request.urlopen(post_req, timeout=10).read())['prompt_id']
        except urllib.error.HTTPError as e:
            error_body = e.read().decode('utf-8')
            print(f"ComfyUI API Error ({e.code}): {error_body}")
            raise Exception(f"HTTP Error {e.code}: {error_body}")
        except Exception as e:
            raise e
        # Poll for the rendering result.
        history_data = None
        for i in range(300):  # wait at most 300s (5 minutes)
            try:
                res = get_comfy_history(target_backend, prompt_id)
                if prompt_id in res:
                    history_data = res[prompt_id]
                    break
            except Exception as e:
                pass
            time.sleep(1)
        if not history_data:
            raise Exception("ComfyUI 渲染超时")
        # Copy result images from the ComfyUI backend into OUTPUT_DIR.
        local_urls = []
        current_timestamp = time.time()
        if 'outputs' in history_data:
            for node_id in history_data['outputs']:
                node_output = history_data['outputs'][node_id]
                if 'images' in node_output:
                    for img in node_output['images']:
                        comfy_url_path = f"/view?filename={img['filename']}&subfolder={img['subfolder']}&type={img['type']}"
                        # Prefix marks the request type and timestamp.
                        prefix = f"{req.type}_{int(current_timestamp)}_"
                        local_path = download_image(target_backend, comfy_url_path, prefix=prefix)
                        local_urls.append(local_path)
        # Persist and return the record.
        result = {
            "prompt": req.prompt if req.prompt else "Detail Enhance",  # default title
            "images": local_urls,
            "seed": seed,
            "timestamp": current_timestamp,
            "type": req.type,  # stored record type
            "params": req.params  # stored so the frontend can reproduce the run
        }
        save_to_history(result)
        # Broadcast on the main event loop (we are in a worker thread here).
        if GLOBAL_LOOP:
            asyncio.run_coroutine_threadsafe(manager.broadcast_new_image(result), GLOBAL_LOOP)
        return result
    except Exception as e:
        return {"images": [], "error": str(e)}
    finally:
        # Decrement the local load counter for the chosen backend.
        if target_backend:
            with LOAD_LOCK:
                if BACKEND_LOCAL_LOAD.get(target_backend, 0) > 0:
                    BACKEND_LOCAL_LOAD[target_backend] -= 1
        # Task finished (success or failure): remove it from the queue.
        if current_task:
            with QUEUE_LOCK:
                if current_task in QUEUE:
                    QUEUE.remove(current_task)
if __name__ == "__main__":
    import uvicorn
    # Single-process mode keeps the in-memory WebSocket online count accurate.
    uvicorn.run(app, host="0.0.0.0", port=7860)
|