
Commit 887de74

Fix Cascade overcooking with more than 75 tokens
1 parent 885f272 commit 887de74

File tree: 1 file changed (+1 -0 lines)


modules/prompt_parser_diffusers.py (+1 line)
@@ -230,6 +230,7 @@ def pad_to_same_length(pipe, embeds):
     try:
         if getattr(pipe, "prior_pipe", None) and getattr(pipe.prior_pipe, "text_encoder", None) is not None: # Cascade
             empty_embed = pipe.prior_pipe.encode_prompt(device, 1, 1, False, "")
+            empty_embed = [torch.zeros(empty_embed[0].shape, device=empty_embed[0].device, dtype=empty_embed[0].dtype)]
         else: # SDXL
             empty_embed = pipe.encode_prompt("")
     except TypeError: # SD1.5
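
For context on what the added line changes: pad_to_same_length extends shorter prompt embeddings with an empty-prompt embedding so that positive and negative prompts reach the same token length, and for Stable Cascade the encoded empty prompt is now replaced with a zero tensor of the same shape, which avoids the overcooked results seen with prompts longer than 75 tokens. Below is a minimal sketch of that padding idea, not the actual module code; the helper name pad_to_same_length_sketch and the tensor shapes are illustrative assumptions.

import torch

def pad_to_same_length_sketch(embeds, empty_embed):
    # embeds: list of prompt-embedding tensors shaped [batch, tokens, dim]
    # empty_embed: one empty-prompt chunk with the same embedding dim (zeros in the Cascade case)
    max_len = max(e.shape[1] for e in embeds)
    padded = []
    for e in embeds:
        while e.shape[1] < max_len:
            # append empty chunks until the embedding is at least as long as the longest prompt
            e = torch.cat([e, empty_embed.to(device=e.device, dtype=e.dtype)], dim=1)
        padded.append(e[:, :max_len])  # trim any overshoot from whole-chunk padding
    return padded

# Example: pad a 77-token and a 154-token embedding to the same length
a = torch.randn(1, 77, 1280)
b = torch.randn(1, 154, 1280)
empty = torch.zeros(1, 77, 1280)  # zeroed empty embedding, as in this fix for Cascade
a_pad, b_pad = pad_to_same_length_sketch([a, b], empty)
assert a_pad.shape == b_pad.shape == (1, 154, 1280)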
