This repository has been archived by the owner on Jan 9, 2024. It is now read-only.

Merge pull request #182 from LucianoCirino/XY-Input-LoRA-Stacks-Fix
XY Input LoRA Stacks Fix
LucianoCirino authored Sep 19, 2023
2 parents 5c42d16 + 43d265f commit 6921c58
Showing 1 changed file with 26 additions and 17 deletions.
efficiency_nodes.py (43 changes: 26 additions & 17 deletions)
@@ -852,7 +852,7 @@ def process_xy_for_print(value, replacement, type_):
else (os.path.basename(v[0]), v[1]) if v[2] is None
else (os.path.basename(v[0]),) + v[1:] for v in value]

elif type_ == "LoRA" and isinstance(value, list):
elif (type_ == "LoRA" or type_ == "LoRA Stacks") and isinstance(value, list):
# Return only the first Tuple of each inner array
return [[(os.path.basename(v[0][0]),) + v[0][1:], "..."] if len(v) > 1
else [(os.path.basename(v[0][0]),) + v[0][1:]] for v in value]
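
For orientation (not part of the diff), here is a minimal sketch, with hypothetical file names, of the value shape this new branch handles: an XY "LoRA Stacks" value is assumed to be a list of stacks, each stack a list of (lora_path, model_weight, clip_weight) tuples, and the branch keeps only the first tuple of each stack with its path shortened to a basename.

import os

# Hypothetical "LoRA Stacks" XY value: two stacks of (lora_path, model_wt, clip_wt) tuples.
value = [
    [("loras/styleA.safetensors", 0.8, 0.8), ("loras/detail.safetensors", 0.5, 0.5)],
    [("loras/styleB.safetensors", 1.0, 1.0)],
]

# Same shortening as the branch above: basename of the first tuple, "..." if the stack has more.
printable = [[(os.path.basename(v[0][0]),) + v[0][1:], "..."] if len(v) > 1
             else [(os.path.basename(v[0][0]),) + v[0][1:]] for v in value]
print(printable)
# [[('styleA.safetensors', 0.8, 0.8), '...'], [('styleB.safetensors', 1.0, 1.0)]]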
@@ -953,6 +953,7 @@ def process_xy_for_print(value, replacement, type_):
"Checkpoint",
"Refiner",
"LoRA",
"LoRA Stacks",
"VAE",
]
conditioners = {
@@ -997,6 +998,9 @@ def process_xy_for_print(value, replacement, type_):
# Create a list of tuples with types and values
type_value_pairs = [(X_type, X_value.copy()), (Y_type, Y_value.copy())]

# Replace "LoRA Stacks" with "LoRA"
type_value_pairs = [('LoRA' if t == 'LoRA Stacks' else t, v) for t, v in type_value_pairs]

# Iterate over type-value pairs
for t, v in type_value_pairs:
if t in dict_map:
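
A small standalone sketch of the normalization added here (values are illustrative; the surrounding dict_map bookkeeping is omitted): "LoRA Stacks" is folded into "LoRA" before the type/value pairs are consumed, so code that only knows the "LoRA" key keeps working.

# Illustrative axis types and values; "Seeds++ Batch" stands in for any non-LoRA type.
X_type, X_value = "LoRA Stacks", [[("loras/styleA.safetensors", 0.8, 0.8)]]
Y_type, Y_value = "Seeds++ Batch", [0, 1, 2]

type_value_pairs = [(X_type, X_value.copy()), (Y_type, Y_value.copy())]

# Fold the new type into the existing "LoRA" key.
type_value_pairs = [('LoRA' if t == 'LoRA Stacks' else t, v) for t, v in type_value_pairs]
print([t for t, _ in type_value_pairs])  # ['LoRA', 'Seeds++ Batch']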
@@ -1039,7 +1043,7 @@ def process_xy_for_print(value, replacement, type_):
elif X_type == "Refiner":
ckpt_dict = []
lora_dict = []
elif X_type == "LoRA":
elif X_type in ("LoRA", "LoRA Stacks"):
ckpt_dict = []
refn_dict = []

@@ -1202,7 +1206,7 @@ def define_variable(var_type, var, add_noise, seed, steps, start_at_step, end_at
text = f"RefClipSkip ({refiner_clip_skip[0]})"

elif "LoRA" in var_type:
- if not lora_stack:
+ if not lora_stack or var_type == "LoRA Stacks":
lora_stack = var.copy()
else:
# Updating the first tuple of lora_stack
@@ -1212,7 +1216,7 @@ def define_variable(var_type, var, add_noise, seed, steps, start_at_step, end_at
lora_name, lora_model_wt, lora_clip_wt = lora_stack[0]
lora_filename = os.path.splitext(os.path.basename(lora_name))[0]

if var_type == "LoRA":
if var_type == "LoRA" or var_type == "LoRA Stacks":
if len(lora_stack) == 1:
lora_model_wt = format(float(lora_model_wt), ".2f").rstrip('0').rstrip('.')
lora_clip_wt = format(float(lora_clip_wt), ".2f").rstrip('0').rstrip('.')
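
As a side note, a tiny illustrative sketch of the weight formatting used for the label here: two decimal places with trailing zeros and any dangling decimal point stripped, so axis labels stay compact.

def fmt(wt):
    # "0.80" -> "0.8", "1.00" -> "1", "0.75" -> "0.75"
    return format(float(wt), ".2f").rstrip('0').rstrip('.')

print(fmt(0.80), fmt(1.0), fmt(0.75))  # 0.8 1 0.75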
@@ -1335,7 +1339,7 @@ def define_model(model, clip, clip_skip, refiner_model, refiner_clip, refiner_cl
# Note: Index is held at 0 when Y_type == "Nothing"

# Load Checkpoint if required. If Y_type is LoRA, required models will be loaded by load_lora func.
if (X_type == "Checkpoint" and index == 0 and Y_type != "LoRA"):
if (X_type == "Checkpoint" and index == 0 and Y_type not in ("LoRA", "LoRA Stacks")):
if lora_stack is None:
model, clip, _ = load_checkpoint(ckpt_name, xyplot_id, cache=cache[1])
else: # Load Efficient Loader LoRA
@@ -1344,11 +1348,11 @@ def define_model(model, clip, clip_skip, refiner_model, refiner_clip, refiner_cl
encode = True

# Load LoRA if required
elif (X_type == "LoRA" and index == 0):
elif (X_type in ("LoRA", "LoRA Stacks") and index == 0):
# Don't cache Checkpoints
model, clip = load_lora(lora_stack, ckpt_name, xyplot_id, cache=cache[2])
encode = True
elif Y_type == "LoRA": # X_type must be Checkpoint, so cache those as defined
elif Y_type in ("LoRA", "LoRA Stacks"): # X_type must be Checkpoint, so cache those as defined
model, clip = load_lora(lora_stack, ckpt_name, xyplot_id,
cache=None, ckpt_cache=cache[1])
encode = True
@@ -1568,7 +1572,7 @@ def clone_or_none(*originals):
clear_cache_by_exception(xyplot_id, lora_dict=[], refn_dict=[])
elif X_type == "Refiner":
clear_cache_by_exception(xyplot_id, ckpt_dict=[], lora_dict=[])
elif X_type == "LoRA":
elif X_type in ("LoRA", "LoRA Stacks"):
clear_cache_by_exception(xyplot_id, ckpt_dict=[], refn_dict=[])

# __________________________________________________________________________________________________________
@@ -1668,7 +1672,7 @@ def get_lora_name(X_type, Y_type, X_value, Y_value, lora_stack=None):
lora_name = lora_wt = lora_model_str = lora_clip_str = None

# Check for all possible LoRA types
lora_types = ["LoRA", "LoRA Batch", "LoRA Wt", "LoRA MStr", "LoRA CStr"]
lora_types = ["LoRA", "LoRA Stacks", "LoRA Batch", "LoRA Wt", "LoRA MStr", "LoRA CStr"]

if X_type not in lora_types and Y_type not in lora_types:
if lora_stack:
@@ -1681,7 +1685,7 @@ def get_lora_name(X_type, Y_type, X_value, Y_value, lora_stack=None):
else:
if X_type in lora_types:
value = get_lora_sublist_name(X_type, X_value)
if X_type == "LoRA":
if X_type in ("LoRA", "LoRA Stacks"):
lora_name = value
lora_model_str = None
lora_clip_str = None
@@ -1703,7 +1707,7 @@ def get_lora_name(X_type, Y_type, X_value, Y_value, lora_stack=None):

if Y_type in lora_types:
value = get_lora_sublist_name(Y_type, Y_value)
if Y_type == "LoRA":
if Y_type in ("LoRA", "LoRA Stacks"):
lora_name = value
lora_model_str = None
lora_clip_str = None
@@ -1726,13 +1730,13 @@ def get_lora_name(X_type, Y_type, X_value, Y_value, lora_stack=None):
return lora_name, lora_wt, lora_model_str, lora_clip_str

def get_lora_sublist_name(lora_type, lora_value):
if lora_type == "LoRA" or lora_type == "LoRA Batch":
if lora_type in ("LoRA", "LoRA Batch", "LoRA Stacks"):
formatted_sublists = []
for sublist in lora_value:
formatted_entries = []
for x in sublist:
base_name = os.path.splitext(os.path.basename(str(x[0])))[0]
formatted_str = f"{base_name}({round(x[1], 3)},{round(x[2], 3)})" if lora_type == "LoRA" else f"{base_name}"
formatted_str = f"{base_name}({round(x[1], 3)},{round(x[2], 3)})" if lora_type in ("LoRA", "LoRA Stacks") else f"{base_name}"
formatted_entries.append(formatted_str)
formatted_sublists.append(f"{', '.join(formatted_entries)}")
return "\n ".join(formatted_sublists)
@@ -2375,7 +2379,7 @@ def XYplot(self, grid_spacing, XY_flip, Y_label_orientation, cache_models, ksamp
# Check that dependencies are connected for specific plot types
encode_types = {
"Checkpoint", "Refiner",
"LoRA", "LoRA Batch", "LoRA Wt", "LoRA MStr", "LoRA CStr",
"LoRA", "LoRA Stacks", "LoRA Batch", "LoRA Wt", "LoRA MStr", "LoRA CStr",
"Positive Prompt S/R", "Negative Prompt S/R",
"AScore+", "AScore-",
"Clip Skip", "Clip Skip (Refiner)",
@@ -2391,8 +2395,13 @@ def XYplot(self, grid_spacing, XY_flip, Y_label_orientation, cache_models, ksamp
# Check if both X_type and Y_type are special lora_types
lora_types = {"LoRA Batch", "LoRA Wt", "LoRA MStr", "LoRA CStr"}
if (X_type in lora_types and Y_type not in lora_types) or (Y_type in lora_types and X_type not in lora_types):
- print(
- f"{error('XY Plot Error:')} Both X and Y must be connected to use the 'LoRA Plot' node.")
+ print(f"{error('XY Plot Error:')} Both X and Y must be connected to use the 'LoRA Plot' node.")
+ return (None,)
+
+ # Do not allow LoRA and LoRA Stacks
+ lora_types = {"LoRA", "LoRA Stacks"}
+ if (X_type in lora_types and Y_type in lora_types):
+ print(f"{error('XY Plot Error:')} X and Y input types must be different.")
return (None,)

# Clean Schedulers from Sampler data (if other type is Scheduler)
@@ -3139,7 +3148,7 @@ def INPUT_TYPES(cls):
CATEGORY = "Efficiency Nodes/XY Inputs"

def xy_value(self, node_state, lora_stack_1=None, lora_stack_2=None, lora_stack_3=None, lora_stack_4=None, lora_stack_5=None):
xy_type = "LoRA"
xy_type = "LoRA Stacks"
xy_value = [stack for stack in [lora_stack_1, lora_stack_2, lora_stack_3, lora_stack_4, lora_stack_5] if stack is not None]
if not xy_value or not any(xy_value) or node_state == "Disabled":
return (None,)
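
For context, a hedged, standalone sketch of what the XY Input: LoRA Stacks node's method now produces (the success path is cut off by the diff above, so the ((xy_type, xy_value),) return shown here is an assumption; stack contents are hypothetical): up to five optional lora_stack inputs are collected and tagged with the new "LoRA Stacks" type.

def xy_value(node_state, lora_stack_1=None, lora_stack_2=None, lora_stack_3=None,
             lora_stack_4=None, lora_stack_5=None):
    # Standalone rework of the method above; `self` dropped and the final
    # return assumed to be ((xy_type, xy_value),) for illustration.
    xy_type = "LoRA Stacks"
    xy_value = [stack for stack in [lora_stack_1, lora_stack_2, lora_stack_3,
                                    lora_stack_4, lora_stack_5] if stack is not None]
    if not xy_value or not any(xy_value) or node_state == "Disabled":
        return (None,)
    return ((xy_type, xy_value),)

# Two hypothetical stacks connected to the first two inputs:
stack_a = [("loras/styleA.safetensors", 0.8, 0.8)]
stack_b = [("loras/styleB.safetensors", 1.0, 1.0)]
print(xy_value("Enabled", stack_a, stack_b))
# (('LoRA Stacks', [[('loras/styleA.safetensors', 0.8, 0.8)], [('loras/styleB.safetensors', 1.0, 1.0)]]),)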
