zixinz commited on
Commit
5f25c59
·
1 Parent(s): 6f74ce3

chore: ignore pyc and __pycache__

Browse files
Files changed (1) hide show
  1. app.py +34 -1
app.py CHANGED
@@ -138,7 +138,40 @@ def get_pipe() -> FluxFillPipeline:
138
  "or pre-download to a local cache directory."
139
  ) from e
140
 
141
- # ……your original LoRA loading logic went here……
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
142
  _PIPE = pipe
143
  return pipe
144
 
 
138
  "or pre-download to a local cache directory."
139
  ) from e
140
 
141
+ # -------- LoRA (stage1) --------
142
+ lora_dir = CODE_EDIT / "stage1" / "checkpoint-4800"
143
+ lora_file = "pytorch_lora_weights.safetensors" # your actual weights filename
144
+ adapter_name = "stage1"
145
+
146
+ if lora_dir.exists():
147
+ try:
148
+ import peft # just to assert backend is present
149
+ print(f"[pipe] loading LoRA from: {lora_dir}/{lora_file}")
150
+ pipe.load_lora_weights(
151
+ str(lora_dir),
152
+ weight_name=lora_file, # key: specify the weight filename explicitly
153
+ adapter_name=adapter_name # give it a name so the adapter can be switched later
154
+ )
155
+ # newer diffusers: prefer set_adapters
156
+ try:
157
+ pipe.set_adapters(adapter_name, scale=1.0)
158
+ print(f"[pipe] set_adapters('{adapter_name}', scale=1.0)")
159
+ except Exception as e_set:
160
+ print(f"[pipe] set_adapters not available ({e_set}); trying fuse_lora()")
161
+ # older versions / pipelines without set_adapters: fuse the LoRA instead
162
+ try:
163
+ pipe.fuse_lora(lora_scale=1.0)
164
+ print("[pipe] fuse_lora(lora_scale=1.0) done")
165
+ except Exception as e_fuse:
166
+ print(f"[pipe] fuse_lora failed: {e_fuse}")
167
+ print("[pipe] LoRA ready ✅")
168
+ except ImportError:
169
+ print("[pipe] peft not installed; LoRA will be skipped (add `peft>=0.11` to requirements).")
170
+ except Exception as e:
171
+ print(f"[pipe] load_lora_weights failed (continue without): {e}")
172
+ else:
173
+ print(f"[pipe] LoRA path not found: {lora_dir} (continue without)")
174
+
175
  _PIPE = pipe
176
  return pipe
177