Spaces:
Running
on
Zero
Running
on
Zero
Commit
·
5960a43
1
Parent(s):
6179ccd
load_t5
Browse files
Files changed:
- app.py +2 -1
- src/flux/util.py +1 -1
app.py
CHANGED
|
@@ -352,7 +352,8 @@ class FluxEditor:
|
|
| 352 |
with torch.no_grad():
|
| 353 |
z, info = denoise_func(self.model, **inp, timesteps=timesteps, guidance=1, inverse=True, info=info)
|
| 354 |
|
| 355 |
-
|
|
|
|
| 356 |
|
| 357 |
|
| 358 |
#----------------------------- 2 history_tensors used to implement dual-LQR guiding editing -------------------------------------#
|
|
|
|
| 352 |
with torch.no_grad():
|
| 353 |
z, info = denoise_func(self.model, **inp, timesteps=timesteps, guidance=1, inverse=True, info=info)
|
| 354 |
|
| 355 |
+
print(z)
|
| 356 |
+
print(info)
|
| 357 |
|
| 358 |
|
| 359 |
#----------------------------- 2 history_tensors used to implement dual-LQR guiding editing -------------------------------------#
|
src/flux/util.py
CHANGED
|
@@ -131,7 +131,7 @@ def load_flow_model(name: str, device: str | torch.device = "cuda", hf_download:
|
|
| 131 |
|
| 132 |
def load_t5(device: str | torch.device = "cuda", max_length: int = 77) -> HFEmbedder:
|
| 133 |
# max length 64, 128, 256 and 512 should work (if your sequence is short enough)
|
| 134 |
-
return HFEmbedder("
|
| 135 |
#return HFEmbedder("/homedata/HuggingFace/black-forest-labs/FLUX.1-dev/text_encoder_2", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)
|
| 136 |
|
| 137 |
|
|
|
|
| 131 |
|
| 132 |
def load_t5(device: str | torch.device = "cuda", max_length: int = 77) -> HFEmbedder:
|
| 133 |
# max length 64, 128, 256 and 512 should work (if your sequence is short enough)
|
| 134 |
+
return HFEmbedder("director432/Flux1-T5Encoder", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)
|
| 135 |
#return HFEmbedder("/homedata/HuggingFace/black-forest-labs/FLUX.1-dev/text_encoder_2", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)
|
| 136 |
|
| 137 |
|