ZhouZJ36DL committed on
Commit
02fd279
·
1 Parent(s): ef45452
app.py CHANGED
@@ -72,6 +72,10 @@ class FluxEditor:
72
  self.ae.eval()
73
  self.model.eval()
74
 
 
 
 
 
75
  # clear history
76
  if os.path.exists("history_gradio/history.safetensors"):
77
  os.remove("history_gradio/history.safetensors")
 
72
  self.ae.eval()
73
  self.model.eval()
74
 
75
+ print(f"Inital_t5_device: {self.t5.hf_module.device}")
76
+ print(f"Inital_clip_device: {self.clip.hf_module.device}")
77
+ print(f"Inital_flow_model: {self.model.img_in.weight.device}")
78
+
79
  # clear history
80
  if os.path.exists("history_gradio/history.safetensors"):
81
  os.remove("history_gradio/history.safetensors")
src/flux/__pycache__/__init__.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/__init__.cpython-310.pyc and b/src/flux/__pycache__/__init__.cpython-310.pyc differ
 
src/flux/__pycache__/_version.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/_version.cpython-310.pyc and b/src/flux/__pycache__/_version.cpython-310.pyc differ
 
src/flux/__pycache__/math.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/math.cpython-310.pyc and b/src/flux/__pycache__/math.cpython-310.pyc differ
 
src/flux/__pycache__/model.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/model.cpython-310.pyc and b/src/flux/__pycache__/model.cpython-310.pyc differ
 
src/flux/__pycache__/sampling.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/sampling.cpython-310.pyc and b/src/flux/__pycache__/sampling.cpython-310.pyc differ
 
src/flux/__pycache__/util.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/util.cpython-310.pyc and b/src/flux/__pycache__/util.cpython-310.pyc differ
 
src/flux/modules/__pycache__/autoencoder.cpython-310.pyc CHANGED
Binary files a/src/flux/modules/__pycache__/autoencoder.cpython-310.pyc and b/src/flux/modules/__pycache__/autoencoder.cpython-310.pyc differ
 
src/flux/modules/__pycache__/conditioner.cpython-310.pyc CHANGED
Binary files a/src/flux/modules/__pycache__/conditioner.cpython-310.pyc and b/src/flux/modules/__pycache__/conditioner.cpython-310.pyc differ
 
src/flux/modules/__pycache__/layers.cpython-310.pyc CHANGED
Binary files a/src/flux/modules/__pycache__/layers.cpython-310.pyc and b/src/flux/modules/__pycache__/layers.cpython-310.pyc differ
 
src/flux/sampling.py CHANGED
@@ -38,6 +38,9 @@ def prepare(t5: HFEmbedder, clip: HFEmbedder, img: Tensor, prompt: str | list[st
38
  if vec.shape[0] == 1 and bs > 1:
39
  vec = repeat(vec, "1 ... -> bs ...", bs=bs)
40
 
 
 
 
41
  return {
42
  "img": img,
43
  "img_ids": img_ids.to(img.device),
 
38
  if vec.shape[0] == 1 and bs > 1:
39
  vec = repeat(vec, "1 ... -> bs ...", bs=bs)
40
 
41
+ print(f"prepare t5 embedding: {txt}")
42
+ print(f"prepare clip embedding: {vec}")
43
+
44
  return {
45
  "img": img,
46
  "img_ids": img_ids.to(img.device),
src/flux/util.py CHANGED
@@ -132,7 +132,7 @@ def load_flow_model(name: str, device: str | torch.device = "cuda", hf_download:
132
  def load_t5(device: str | torch.device = "cuda", max_length: int = 77) -> HFEmbedder:
133
  # max length 64, 128, 256 and 512 should work (if your sequence is short enough)
134
  return HFEmbedder("director432/Flux1-T5Encoder", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)
135
- #return HFEmbedder("/homedata/HuggingFace/black-forest-labs/FLUX.1-dev/text_encoder_2", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)
136
 
137
 
138
  def load_clip(device: str | torch.device = "cuda") -> HFEmbedder:
 
132
  def load_t5(device: str | torch.device = "cuda", max_length: int = 77) -> HFEmbedder:
133
  # max length 64, 128, 256 and 512 should work (if your sequence is short enough)
134
  return HFEmbedder("director432/Flux1-T5Encoder", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)
135
+ #return HFEmbedder("/homedata/HuggingFace/black-forest-labs/print.1-dev/text_encoder_2", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)
136
 
137
 
138
  def load_clip(device: str | torch.device = "cuda") -> HFEmbedder: