Spaces:
Runtime error
Runtime error
Delete app.py
Browse files
app.py
DELETED
|
@@ -1,39 +0,0 @@
|
|
| 1 |
-
import os
|
| 2 |
-
import gradio as gr
|
| 3 |
-
from huggingface_hub import snapshot_download
|
| 4 |
-
import torch
|
| 5 |
-
from diffusers import FluxPipeline, FluxTransformer2DModel
|
| 6 |
-
|
| 7 |
-
# Install dependencies (intended to run automatically at Space startup).
# NOTE(review): this runs AFTER the imports above, so it cannot have made
# those imports succeed — dependencies belong in requirements.txt. Kept
# only for backward compatibility with the original startup behavior.
os.system("pip install huggingface-hub transformers diffusers accelerate safetensors torch gradio")

# Download the PosterCraft fine-tuned transformer weights.
model_dir = "models/postercraft/postercraft_rl"
hf_token = os.environ.get("HF_TOKEN")  # token comes from the Space secret
# `resume_download` is deprecated in recent huggingface_hub (downloads
# always resume now), so it is no longer passed.
snapshot_download(repo_id="PosterCraft/PosterCraft-v1_RL", local_dir=model_dir, token=hf_token)

# Load the model the official way: base FLUX.1-dev pipeline (gated repo —
# requires an authorized token), then swap in the PosterCraft transformer.
pipeline_id = "black-forest-labs/FLUX.1-dev"
dtype = torch.bfloat16
pipe = FluxPipeline.from_pretrained(pipeline_id, torch_dtype=dtype, token=hf_token)  # token auth
pipe.transformer = FluxTransformer2DModel.from_pretrained(model_dir, torch_dtype=dtype, token=hf_token)  # token auth
pipe.to("cuda" if torch.cuda.is_available() else "cpu")
| 22 |
-
# Generation function
def generate_poster(prompt):
    """Generate a poster image from a text ``prompt``.

    Returns the first image produced by the global ``pipe`` on success.

    Raises:
        gr.Error: if generation fails. The original code returned the error
            message as a string, but this function's output is wired to a
            ``gr.Image`` component — a string return makes Gradio raise a
            second, unrelated component error and hides the real cause.
            Raising ``gr.Error`` surfaces the message directly in the UI.
    """
    try:
        image = pipe(prompt).images[0]
        return image
    except Exception as e:
        raise gr.Error(f"Error: {str(e)}") from e
| 30 |
-
# Gradio UI (custom interface): a single textbox in, a single image out.
prompt_box = gr.Textbox(
    label="海报描述",
    placeholder="例如: WTCC 世界网球洲际对抗赛海报, 现代风格, 蓝色调",
)
poster_view = gr.Image(label="生成海报")

iface = gr.Interface(
    fn=generate_poster,
    inputs=prompt_box,
    outputs=poster_view,
    title="PosterCraft 海报生成系统",
    description="输入海报描述生成高品质海报。基于 PosterCraft-v1_RL 模型。",
)

iface.launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|