Spaces:
Running
on
Zero
Running
on
Zero
zixinz
committed on
Commit
·
6f74ce3
1
Parent(s):
41300b4
chore: ignore pyc and __pycache__
Browse files
app.py
CHANGED
|
@@ -99,22 +99,46 @@ def get_pipe() -> FluxFillPipeline:
|
|
| 99 |
global _PIPE
|
| 100 |
if _PIPE is not None:
|
| 101 |
return _PIPE
|
|
|
|
| 102 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 103 |
dtype = torch.bfloat16 if device == "cuda" else torch.float32
|
| 104 |
-
print(f"[pipe] load FLUX.1-Fill-dev dtype={dtype}, device={device}")
|
| 105 |
-
pipe = FluxFillPipeline.from_pretrained("black-forest-labs/FLUX.1-Fill-dev", torch_dtype=dtype).to(device)
|
| 106 |
-
|
| 107 |
-
# LoRA(stage1)
|
| 108 |
-
lora_dir = CODE_EDIT / "stage1" / "checkpoint-4800"
|
| 109 |
-
if lora_dir.exists():
|
| 110 |
-
try:
|
| 111 |
-
pipe.load_lora_weights(str(lora_dir)) # 需要 peft
|
| 112 |
-
print(f"[pipe] loaded LoRA from: {lora_dir}")
|
| 113 |
-
except Exception as e:
|
| 114 |
-
print(f"[pipe] load LoRA failed (continue without): {e}")
|
| 115 |
-
else:
|
| 116 |
-
print(f"[pipe] LoRA path not found: {lora_dir} (continue without)")
|
| 117 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 118 |
_PIPE = pipe
|
| 119 |
return pipe
|
| 120 |
|
|
|
|
| 99 |
global _PIPE
|
| 100 |
if _PIPE is not None:
|
| 101 |
return _PIPE
|
| 102 |
+
|
| 103 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 104 |
dtype = torch.bfloat16 if device == "cuda" else torch.float32
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 105 |
|
| 106 |
+
|
| 107 |
+
local_flux = BASE_DIR / "code_edit" / "flux_cache"
|
| 108 |
+
use_local = local_flux.exists()
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
hf_token = os.environ.get("HF_TOKEN")
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
try:
|
| 115 |
+
from huggingface_hub import hf_hub_enable_hf_transfer
|
| 116 |
+
hf_hub_enable_hf_transfer()
|
| 117 |
+
except Exception:
|
| 118 |
+
pass
|
| 119 |
+
|
| 120 |
+
print(f"[pipe] loading FLUX.1-Fill-dev (dtype={dtype}, device={device}, local={use_local})")
|
| 121 |
+
try:
|
| 122 |
+
if use_local:
|
| 123 |
+
pipe = FluxFillPipeline.from_pretrained(
|
| 124 |
+
local_flux, torch_dtype=dtype
|
| 125 |
+
).to(device)
|
| 126 |
+
else:
|
| 127 |
+
# 在线拉取(需要 gated 访问 + token)
|
| 128 |
+
pipe = FluxFillPipeline.from_pretrained(
|
| 129 |
+
"black-forest-labs/FLUX.1-Fill-dev",
|
| 130 |
+
torch_dtype=dtype,
|
| 131 |
+
token=hf_token,
|
| 132 |
+
# use_auth_token=hf_token,
|
| 133 |
+
).to(device)
|
| 134 |
+
except Exception as e:
|
| 135 |
+
raise RuntimeError(
|
| 136 |
+
"Failed to load FLUX.1-Fill-dev. "
|
| 137 |
+
"Make sure your account has access to the gated repo and HF_TOKEN is set as a Space secret, "
|
| 138 |
+
"or pre-download to a local cache directory."
|
| 139 |
+
) from e
|
| 140 |
+
|
| 141 |
+
# ……你原有的 LoRA 加载逻辑……
|
| 142 |
_PIPE = pipe
|
| 143 |
return pipe
|
| 144 |
|