From ba9456ed0000af6eb65f8c42b20ca9a883a6e449 Mon Sep 17 00:00:00 2001
From: tazlin
Date: Tue, 17 Sep 2024 09:30:35 -0400
Subject: [PATCH] fix: don't abandon job on bad lora file

---
 hordelib/nodes/node_lora_loader.py | 20 ++++++++++++--------
 1 file changed, 12 insertions(+), 8 deletions(-)

diff --git a/hordelib/nodes/node_lora_loader.py b/hordelib/nodes/node_lora_loader.py
index 6d817d9..ddbb00b 100644
--- a/hordelib/nodes/node_lora_loader.py
+++ b/hordelib/nodes/node_lora_loader.py
@@ -59,7 +59,6 @@ def load_lora(self, model, clip, lora_name, strength_model, strength_clip):
             return (model, clip)
 
         lora_path = folder_paths.get_full_path("loras", lora_name)
-
         lora = None
         if self.loaded_lora is not None:
             if self.loaded_lora[0] == lora_path:
@@ -69,13 +68,18 @@ def load_lora(self, model, clip, lora_name, strength_model, strength_clip):
                 self.loaded_lora = None
                 del temp
 
-        if lora is None:
-            lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
-            self.loaded_lora = (lora_path, lora)
-
-        model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip)
-        log_free_ram()
-        return (model_lora, clip_lora)
+        try:
+            with logger.catch(reraise=True):
+                if lora is None:
+                    lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
+                    self.loaded_lora = (lora_path, lora)
+
+                model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip)
+                log_free_ram()
+                return (model_lora, clip_lora)
+        except Exception as e:
+            logger.error(f"Error loading lora {lora_name}: {e}")
+            return (model, clip)
 
 
 NODE_CLASS_MAPPINGS = {"HordeLoraLoader": HordeLoraLoader}
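
For reviewers: below is a minimal, self-contained sketch of the fallback pattern this diff introduces, assuming only that loguru is installed. Only the logger.catch(reraise=True) context manager and the "log the error, return the unmodified (model, clip)" fallback mirror the change above; load_file and the string stand-ins for the model and clip objects are hypothetical placeholders for comfy.utils.load_torch_file and the real ComfyUI objects.

from loguru import logger


def load_file(path: str) -> dict:
    # Hypothetical stand-in for comfy.utils.load_torch_file; always fails
    # to simulate a corrupt or truncated lora file.
    raise ValueError(f"bad safetensors file: {path}")


def apply_lora(model: str, clip: str, lora_name: str) -> tuple[str, str]:
    try:
        # logger.catch(reraise=True) logs the full traceback, then re-raises
        # so the except branch below can fall back instead of failing the job.
        with logger.catch(reraise=True):
            lora = load_file(lora_name)
            return (f"{model}+{lora}", f"{clip}+{lora}")
    except Exception as e:
        logger.error(f"Error loading lora {lora_name}: {e}")
        # Skip the lora and continue with the unmodified inputs,
        # rather than abandoning the whole job.
        return (model, clip)


if __name__ == "__main__":
    print(apply_lora("model", "clip", "broken.safetensors"))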