ZhouZJ36DL committed on
Commit
6c83c25
·
1 Parent(s): 1654ad4

modified: src/flux/modules/conditioner.py

Browse files
src/flux/__pycache__/__init__.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/__init__.cpython-310.pyc and b/src/flux/__pycache__/__init__.cpython-310.pyc differ
 
src/flux/__pycache__/_version.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/_version.cpython-310.pyc and b/src/flux/__pycache__/_version.cpython-310.pyc differ
 
src/flux/__pycache__/math.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/math.cpython-310.pyc and b/src/flux/__pycache__/math.cpython-310.pyc differ
 
src/flux/__pycache__/model.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/model.cpython-310.pyc and b/src/flux/__pycache__/model.cpython-310.pyc differ
 
src/flux/__pycache__/sampling.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/sampling.cpython-310.pyc and b/src/flux/__pycache__/sampling.cpython-310.pyc differ
 
src/flux/__pycache__/util.cpython-310.pyc CHANGED
Binary files a/src/flux/__pycache__/util.cpython-310.pyc and b/src/flux/__pycache__/util.cpython-310.pyc differ
 
src/flux/modules/__pycache__/autoencoder.cpython-310.pyc CHANGED
Binary files a/src/flux/modules/__pycache__/autoencoder.cpython-310.pyc and b/src/flux/modules/__pycache__/autoencoder.cpython-310.pyc differ
 
src/flux/modules/__pycache__/conditioner.cpython-310.pyc CHANGED
Binary files a/src/flux/modules/__pycache__/conditioner.cpython-310.pyc and b/src/flux/modules/__pycache__/conditioner.cpython-310.pyc differ
 
src/flux/modules/__pycache__/layers.cpython-310.pyc CHANGED
Binary files a/src/flux/modules/__pycache__/layers.cpython-310.pyc and b/src/flux/modules/__pycache__/layers.cpython-310.pyc differ
 
src/flux/modules/conditioner.py CHANGED
@@ -31,9 +31,14 @@ class HFEmbedder(nn.Module):
31
  return_tensors="pt",
32
  )
33
 
 
 
 
 
 
34
  input_ids = batch_encoding["input_ids"]
35
  print(f"input_ids shape: {input_ids.shape}, max_length: {self.max_length}") # Debug
36
- assert input_ids.shape[1] == 77, f"Sequence length {input_ids.shape[1]} exceeds max_length 77"
37
 
38
  outputs = self.hf_module(
39
  input_ids=input_ids.to(self.hf_module.device),
 
31
  return_tensors="pt",
32
  )
33
 
34
+ if self.is_clip:
35
+ flag = 'clip'
36
+ else:
37
+ flag = 't5'
38
+ print(f'forward {flag}')
39
  input_ids = batch_encoding["input_ids"]
40
  print(f"input_ids shape: {input_ids.shape}, max_length: {self.max_length}") # Debug
41
+ assert input_ids.shape[1] == self.max_length, f"Sequence length {input_ids.shape[1]} does not match max_length {self.max_length}"
42
 
43
  outputs = self.hf_module(
44
  input_ids=input_ids.to(self.hf_module.device),