primerz committed on
Commit
faa3c33
·
verified ·
1 Parent(s): 7f5c816

Update generator.py

Browse files
Files changed (1) hide show
  1. generator.py +15 -5
generator.py CHANGED
@@ -767,17 +767,16 @@ class RetroArtConverter:
767
  try:
768
  print("Encoding prompts with Compel...")
769
 
770
- # Pass both prompts as a list to be batched
771
  conditioning_batch, pooled_batch = self.compel([prompt, negative_prompt])
772
-
773
  # Unpack the batch results using slicing
774
- # [0:1] and [1:2] keeps the batch dimension, which is required
775
  pipe_kwargs["prompt_embeds"] = conditioning_batch[0:1]
776
  pipe_kwargs["pooled_prompt_embeds"] = pooled_batch[0:1]
777
  pipe_kwargs["negative_prompt_embeds"] = conditioning_batch[1:2]
778
  pipe_kwargs["negative_pooled_prompt_embeds"] = pooled_batch[1:2]
779
 
780
- print("[OK] Using Compel-encoded prompts")
781
  except Exception as e:
782
  print(f"Compel encoding failed, using standard prompts: {e}")
783
  traceback.print_exc()
@@ -838,7 +837,18 @@ class RetroArtConverter:
838
 
839
  combined_embeds = torch.cat([original_embeds, face_proj_embeds], dim=1)
840
  pipe_kwargs['prompt_embeds'] = combined_embeds
841
- print(f" [OK] Face embeddings concatenated successfully! New shape: {combined_embeds.shape}")
 
 
 
 
 
 
 
 
 
 
 
842
  else:
843
  print(f" [WARNING] Can't concatenate - no prompt_embeds (use Compel)")
844
 
 
767
  try:
768
  print("Encoding prompts with Compel...")
769
 
770
+ # Pass both prompts as a list to be batched - Compel will pad them to match
771
  conditioning_batch, pooled_batch = self.compel([prompt, negative_prompt])
772
+
773
  # Unpack the batch results using slicing
 
774
  pipe_kwargs["prompt_embeds"] = conditioning_batch[0:1]
775
  pipe_kwargs["pooled_prompt_embeds"] = pooled_batch[0:1]
776
  pipe_kwargs["negative_prompt_embeds"] = conditioning_batch[1:2]
777
  pipe_kwargs["negative_pooled_prompt_embeds"] = pooled_batch[1:2]
778
 
779
+ print(f"[OK] Compel encoded - Prompt: {pipe_kwargs['prompt_embeds'].shape}, Negative: {pipe_kwargs['negative_prompt_embeds'].shape}")
780
  except Exception as e:
781
  print(f"Compel encoding failed, using standard prompts: {e}")
782
  traceback.print_exc()
 
837
 
838
  combined_embeds = torch.cat([original_embeds, face_proj_embeds], dim=1)
839
  pipe_kwargs['prompt_embeds'] = combined_embeds
840
+
841
+ # CRITICAL: Pad negative_prompt_embeds by the same amount
842
+ if 'negative_prompt_embeds' in pipe_kwargs:
843
+ negative_embeds = pipe_kwargs['negative_prompt_embeds']
844
+ neg_padding = torch.zeros(
845
+ negative_embeds.shape[0],
846
+ face_proj_embeds.shape[1],
847
+ negative_embeds.shape[2],
848
+ device=negative_embeds.device,
849
+ dtype=negative_embeds.dtype
850
+ )
851
+ pipe_kwargs['negative_prompt_embeds'] = torch.cat([negative_embeds, neg_padding], dim=1)
852
  else:
853
  print(f" [WARNING] Can't concatenate - no prompt_embeds (use Compel)")
854