Skip to content

Commit c909c11

Browse files
authored
Merge pull request #3326 from vladmandic/dev
merge
2 parents 74150db + 425c757 commit c909c11

File tree

1 file changed

+13
-10
lines changed

1 file changed

+13
-10
lines changed

modules/textual_inversion/textual_inversion.py

+13-10
Original file line numberDiff line numberDiff line change
@@ -283,16 +283,19 @@ def load_diffusers_embedding(self, filename: Union[str, List[str]] = None, data:
283283
if not all([text_encoders, tokenizers, hiddensizes]):
284284
return 0
285285
for embedding in embeddings:
286-
embedding.vector_sizes = [v.shape[-1] for v in embedding.vec]
287-
if shared.opts.diffusers_convert_embed and 768 in hiddensizes and 1280 in hiddensizes and 1280 not in embedding.vector_sizes and 768 in embedding.vector_sizes:
288-
embedding.vec.append(
289-
convert_embedding(embedding.vec[embedding.vector_sizes.index(768)], text_encoders[hiddensizes.index(768)],
290-
text_encoders[hiddensizes.index(1280)]))
291-
embedding.vector_sizes.append(1280)
292-
if (not all(vs in hiddensizes for vs in embedding.vector_sizes) or # Skip SD2.1 in SD1.5/SDXL/SD3 vice versa
293-
len(embedding.vector_sizes) > len(hiddensizes) or # Skip SDXL/SD3 in SD1.5
294-
(len(embedding.vector_sizes) < len(hiddensizes) and len(embedding.vector_sizes) != 2)): # SD3 no T5
295-
embedding.tokens = []
286+
try:
287+
embedding.vector_sizes = [v.shape[-1] for v in embedding.vec]
288+
if shared.opts.diffusers_convert_embed and 768 in hiddensizes and 1280 in hiddensizes and 1280 not in embedding.vector_sizes and 768 in embedding.vector_sizes:
289+
embedding.vec.append(
290+
convert_embedding(embedding.vec[embedding.vector_sizes.index(768)], text_encoders[hiddensizes.index(768)],
291+
text_encoders[hiddensizes.index(1280)]))
292+
embedding.vector_sizes.append(1280)
293+
if (not all(vs in hiddensizes for vs in embedding.vector_sizes) or # Skip SD2.1 in SD1.5/SDXL/SD3 vice versa
294+
len(embedding.vector_sizes) > len(hiddensizes) or # Skip SDXL/SD3 in SD1.5
295+
(len(embedding.vector_sizes) < len(hiddensizes) and len(embedding.vector_sizes) != 2)): # SD3 no T5
296+
embedding.tokens = []
297+
self.skipped_embeddings[embedding.name] = embedding
298+
except Exception:
296299
self.skipped_embeddings[embedding.name] = embedding
297300
if overwrite:
298301
shared.log.info(f"Loading Bundled embeddings: {list(data.keys())}")

0 commit comments

Comments
 (0)