ZhouZJ36DL committed on
Commit
787b33a
·
1 Parent(s): f069c07

modified: src/flux/util.py

Browse files
src/flux/__pycache__/__init__.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/__init__.cpython-310.pyc and b/src/flux/__pycache__/__init__.cpython-310.pyc differ
 
src/flux/__pycache__/_version.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/_version.cpython-310.pyc and b/src/flux/__pycache__/_version.cpython-310.pyc differ
 
src/flux/__pycache__/math.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/math.cpython-310.pyc and b/src/flux/__pycache__/math.cpython-310.pyc differ
 
src/flux/__pycache__/model.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/model.cpython-310.pyc and b/src/flux/__pycache__/model.cpython-310.pyc differ
 
src/flux/__pycache__/sampling.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/sampling.cpython-310.pyc and b/src/flux/__pycache__/sampling.cpython-310.pyc differ
 
src/flux/__pycache__/util.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/util.cpython-310.pyc and b/src/flux/__pycache__/util.cpython-310.pyc differ
 
src/flux/modules/__pycache__/autoencoder.cpython-310.pyc CHANGED
Binary files a/src/flux/modules/__pycache__/autoencoder.cpython-310.pyc and b/src/flux/modules/__pycache__/autoencoder.cpython-310.pyc differ
 
src/flux/modules/__pycache__/conditioner.cpython-310.pyc CHANGED
Binary files a/src/flux/modules/__pycache__/conditioner.cpython-310.pyc and b/src/flux/modules/__pycache__/conditioner.cpython-310.pyc differ
 
src/flux/modules/__pycache__/layers.cpython-310.pyc CHANGED
Binary files a/src/flux/modules/__pycache__/layers.cpython-310.pyc and b/src/flux/modules/__pycache__/layers.cpython-310.pyc differ
 
src/flux/util.py CHANGED
@@ -129,7 +129,7 @@ def load_flow_model(name: str, device: str | torch.device = "cuda", hf_download:
129
  return model
130
 
131
 
132
- def load_t5(device: str | torch.device = "cuda", max_length: int = 512) -> HFEmbedder:
133
  # max length 64, 128, 256 and 512 should work (if your sequence is short enough)
134
  return HFEmbedder("google/t5-v1_1-xl", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)
135
  #return HFEmbedder("/homedata/HuggingFace/black-forest-labs/FLUX.1-dev/text_encoder_2", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)
 
129
  return model
130
 
131
 
132
+ def load_t5(device: str | torch.device = "cuda", max_length: int = 77) -> HFEmbedder:
133
  # max length 64, 128, 256 and 512 should work (if your sequence is short enough)
134
  return HFEmbedder("google/t5-v1_1-xl", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)
135
  #return HFEmbedder("/homedata/HuggingFace/black-forest-labs/FLUX.1-dev/text_encoder_2", max_length=max_length, is_clip=False, torch_dtype=torch.bfloat16).to(device)