primerz committed on
Commit
e1062d1
·
verified ·
1 Parent(s): 6a62352

Update generator.py

Browse files
Files changed (1) hide show
  1. generator.py +14 -1
generator.py CHANGED
@@ -840,7 +840,20 @@ class RetroArtConverter:
840
 
841
  combined_embeds = torch.cat([original_embeds, face_proj_embeds], dim=1)
842
  pipe_kwargs['prompt_embeds'] = combined_embeds
843
- print(f" [OK] Face embeddings concatenated successfully! New shape: {combined_embeds.shape}")
 
 
 
 
 
 
 
 
 
 
 
 
 
844
  else:
845
  print(f" [WARNING] Can't concatenate - no prompt_embeds (use Compel)")
846
 
 
840
 
841
  combined_embeds = torch.cat([original_embeds, face_proj_embeds], dim=1)
842
  pipe_kwargs['prompt_embeds'] = combined_embeds
843
+
844
+ # FIX: Also pad negative_prompt_embeds to match the new shape
845
+ if 'negative_prompt_embeds' in pipe_kwargs:
846
+ negative_embeds = pipe_kwargs['negative_prompt_embeds']
847
+ # Create zero padding with same shape as face_proj_embeds
848
+ padding = torch.zeros(
849
+ (negative_embeds.shape[0], face_proj_embeds.shape[1], negative_embeds.shape[2]),
850
+ device=negative_embeds.device,
851
+ dtype=negative_embeds.dtype
852
+ )
853
+ pipe_kwargs['negative_prompt_embeds'] = torch.cat([negative_embeds, padding], dim=1)
854
+ print(f" [OK] Negative prompt padded to match: {pipe_kwargs['negative_prompt_embeds'].shape}")
855
+
856
+ print(f" [OK] Face embeddings concatenated successfully! Prompt: {combined_embeds.shape}, Negative: {pipe_kwargs['negative_prompt_embeds'].shape}")
857
  else:
858
  print(f" [WARNING] Can't concatenate - no prompt_embeds (use Compel)")
859