Skip to content

Commit

Permalink
Merge branch 'master' into beta
Browse files Browse the repository at this point in the history
  • Loading branch information
jn-jairo committed Apr 17, 2024
2 parents 8f9a3ff + 072e3bd commit 3c48253
Show file tree
Hide file tree
Showing 7 changed files with 138 additions and 20 deletions.
31 changes: 21 additions & 10 deletions comfy/model_patcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,26 @@ def apply_weight_decompose(dora_scale, weight):

return weight * (dora_scale / weight_norm)

def set_model_options_patch_replace(model_options, patch, name, block_name, number, transformer_index=None):
    """Register *patch* as an attention-replacement patch, copy-on-write.

    Each nested level of the "patches_replace" structure inside
    transformer_options is shallow-copied before being modified, so other
    holders of the previous nested dicts are unaffected.  Note that the
    top-level *model_options* dict itself is updated in place and is also
    returned for convenience.
    """
    transformer_options = model_options["transformer_options"].copy()

    # Copy (or create) each nested level before touching it.
    patches_replace = transformer_options.get("patches_replace", {}).copy()
    named_patches = patches_replace.get(name, {}).copy()

    # The block key optionally carries the transformer index.
    if transformer_index is None:
        key = (block_name, number)
    else:
        key = (block_name, number, transformer_index)

    named_patches[key] = patch
    patches_replace[name] = named_patches
    transformer_options["patches_replace"] = patches_replace

    model_options["transformer_options"] = transformer_options
    return model_options

class ModelPatcher:
def __init__(self, model, load_device, offload_device, size=0, current_device=None, weight_inplace_update=False):
Expand Down Expand Up @@ -109,16 +129,7 @@ def set_model_patch(self, patch, name):
to["patches"][name] = to["patches"].get(name, []) + [patch]

def set_model_patch_replace(self, patch, name, block_name, number, transformer_index=None):
    # Thin wrapper: the copy-on-write bookkeeping lives in the module-level
    # set_model_options_patch_replace helper so code that only holds a
    # model_options dict (not a ModelPatcher) can share it.
    # (The diff residue of the old inline body has been removed; it
    # duplicated the helper's logic without the copy-on-write semantics.)
    self.model_options = set_model_options_patch_replace(self.model_options, patch, name, block_name, number, transformer_index=transformer_index)

def set_model_attn1_patch(self, patch):
self.set_model_patch(patch, "attn1_patch")
Expand Down
1 change: 0 additions & 1 deletion comfy_extras/nodes_canny.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
#From https://github.com/kornia/kornia
import math

import torch
Expand Down
56 changes: 56 additions & 0 deletions comfy_extras/nodes_pag.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
#Modified/simplified version of the node from: https://github.com/pamparamm/sd-perturbed-attention
#If you want the one with more options see the above repo.

#My modified one here is more basic but has less chances of breaking with ComfyUI updates.

import comfy.model_patcher
import comfy.samplers

class PerturbedAttentionGuidance:
    """Perturbed-Attention Guidance (PAG) applied as a post-CFG patch.

    Simplified from https://github.com/pamparamm/sd-perturbed-attention;
    fewer options, but fewer chances of breaking with ComfyUI updates.
    """

    @classmethod
    def INPUT_TYPES(s):
        scale_spec = {"default": 3.0, "min": 0.0, "max": 100.0, "step": 0.1, "round": 0.01}
        return {
            "required": {
                "model": ("MODEL",),
                "scale": ("FLOAT", scale_spec),
            }
        }

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "_for_testing"

    def patch(self, model, scale):
        # Only the middle UNet block's first self-attention is perturbed.
        target_block = "middle"
        target_block_id = 0
        patched_model = model.clone()

        def perturbed_attention(q, k, v, extra_options, mask=None):
            # "Identity" attention: returning v discards the attention map.
            return v

        def post_cfg_function(args):
            inner_model = args["model"]
            cond_pred = args["cond_denoised"]
            cond = args["cond"]
            cfg_result = args["denoised"]
            sigma = args["sigma"]
            model_options = args["model_options"].copy()
            x = args["input"]

            if scale == 0:
                return cfg_result

            # Swap self-attention for the perturbed version, then re-run the
            # conditional pass and push the result away from the degraded one.
            model_options = comfy.model_patcher.set_model_options_patch_replace(model_options, perturbed_attention, "attn1", target_block, target_block_id)
            (pag,) = comfy.samplers.calc_cond_batch(inner_model, [cond], x, sigma, model_options)

            return cfg_result + (cond_pred - pag) * scale

        patched_model.set_model_sampler_post_cfg_function(post_cfg_function)

        return (patched_model,)

# Registry read by ComfyUI's node loader: node class name -> implementation.
NODE_CLASS_MAPPINGS = {
    "PerturbedAttentionGuidance": PerturbedAttentionGuidance,
}
62 changes: 54 additions & 8 deletions comfy_extras/nodes_perpneg.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,18 @@
import comfy.sampler_helpers
import comfy.samplers
import comfy.utils
import node_helpers

def perp_neg(x, noise_pred_pos, noise_pred_neg, noise_pred_nocond, neg_scale, cond_scale):
    """Combine noise predictions using the Perp-Neg formulation.

    The negative guidance direction is reduced to its component perpendicular
    to the positive direction, so the negative prompt cannot directly cancel
    the positive one.

    Note: *x* is unused here; it is kept so the signature matches the
    cfg/guider callback convention that passes the current latent.
    (A stray "#TODO: this node should be removed" comment that the diff left
    inside this function body has been removed — it refers to the PerpNeg
    node, not to this helper.)
    """
    pos = noise_pred_pos - noise_pred_nocond
    neg = noise_pred_neg - noise_pred_nocond

    # neg minus its projection onto pos = component of neg perpendicular to pos.
    perp = neg - ((torch.mul(neg, pos).sum()) / (torch.norm(pos) ** 2)) * pos
    perp_neg = perp * neg_scale
    cfg_result = noise_pred_nocond + cond_scale * (pos - perp_neg)
    return cfg_result

#TODO: This node should be removed, it has been replaced with PerpNegGuider
class PerpNeg:
@classmethod
def INPUT_TYPES(s):
Expand Down Expand Up @@ -34,23 +43,60 @@ def cfg_function(args):

(noise_pred_nocond,) = comfy.samplers.calc_cond_batch(model, [nocond_processed], x, sigma, model_options)

pos = noise_pred_pos - noise_pred_nocond
neg = noise_pred_neg - noise_pred_nocond
perp = neg - ((torch.mul(neg, pos).sum())/(torch.norm(pos)**2)) * pos
perp_neg = perp * neg_scale
cfg_result = noise_pred_nocond + cond_scale*(pos - perp_neg)
cfg_result = x - cfg_result
cfg_result = x - perp_neg(x, noise_pred_pos, noise_pred_neg, noise_pred_nocond, neg_scale, cond_scale)
return cfg_result

m.set_model_sampler_cfg_function(cfg_function)

return (m, )


class Guider_PerpNeg(comfy.samplers.CFGGuider):
    """CFGGuider variant that combines predictions with Perp-Neg guidance.

    Requires three conditionings: positive, negative, and an empty negative
    prompt that serves as the unconditional baseline for the projection.
    """

    def set_conds(self, positive, negative, empty_negative_prompt):
        # Tag the empty prompt as a negative so downstream cond processing
        # treats it like the unconditional/negative pass.
        empty_negative_prompt = node_helpers.conditioning_set_values(empty_negative_prompt, {"prompt_type": "negative"})
        self.inner_set_conds({"positive": positive, "empty_negative_prompt": empty_negative_prompt, "negative": negative})

    def set_cfg(self, cfg, neg_scale):
        self.cfg = cfg
        self.neg_scale = neg_scale

    def predict_noise(self, x, timestep, model_options=None, seed=None):
        # NOTE(review): default changed from the mutable `{}` literal to None
        # so a single dict is not shared across calls; behavior is unchanged.
        if model_options is None:
            model_options = {}
        positive_cond = self.conds.get("positive", None)
        negative_cond = self.conds.get("negative", None)
        empty_cond = self.conds.get("empty_negative_prompt", None)

        # Batch all three passes in one call, then combine via perp_neg
        # (order in the list: negative, positive, empty/nocond).
        out = comfy.samplers.calc_cond_batch(self.inner_model, [negative_cond, positive_cond, empty_cond], x, timestep, model_options)
        return perp_neg(x, out[1], out[0], out[2], self.neg_scale, self.cfg)

class PerpNegGuider:
    """Node that builds a Guider_PerpNeg from conditionings and scale values."""

    @classmethod
    def INPUT_TYPES(s):
        required = {
            "model": ("MODEL",),
            "positive": ("CONDITIONING", ),
            "negative": ("CONDITIONING", ),
            "empty_conditioning": ("CONDITIONING", ),
            "cfg": ("FLOAT", {"default": 8.0, "min": 0.0, "max": 100.0, "step":0.1, "round": 0.01}),
            "neg_scale": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.01}),
        }
        return {"required": required}

    RETURN_TYPES = ("GUIDER",)

    FUNCTION = "get_guider"
    CATEGORY = "_for_testing"

    def get_guider(self, model, positive, negative, empty_conditioning, cfg, neg_scale):
        # Build and configure the guider in three explicit steps.
        g = Guider_PerpNeg(model)
        g.set_conds(positive, negative, empty_conditioning)
        g.set_cfg(cfg, neg_scale)
        return (g,)

# Node registry: class name -> implementation.  PerpNeg is kept for backward
# compatibility (the TODO above marks it as replaced by PerpNegGuider).
NODE_CLASS_MAPPINGS = {
    "PerpNeg": PerpNeg,
    "PerpNegGuider": PerpNegGuider,
}

# Display-name overrides shown in the UI.  The diff view left a duplicate
# "PerpNeg" key here; in a dict literal the last value wins, so the earlier
# "Perp-Neg" entry was dead code and has been removed.
NODE_DISPLAY_NAME_MAPPINGS = {
    "PerpNeg": "Perp-Neg (DEPRECATED by PerpNegGuider)",
}
2 changes: 2 additions & 0 deletions folder_paths.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,8 @@ def get_full_path(folder_name, filename):
full_path = os.path.join(x, filename)
if os.path.isfile(full_path):
return full_path
elif os.path.islink(full_path):
logging.warning("WARNING path {} exists but doesn't link anywhere, skipping.".format(full_path))

return None

Expand Down
1 change: 1 addition & 0 deletions nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -1942,6 +1942,7 @@ def init_custom_nodes():
"nodes_differential_diffusion.py",
"nodes_ip2p.py",
"nodes_model_merging_model_specific.py",
"nodes_pag.py",
]

import_failed = []
Expand Down
5 changes: 4 additions & 1 deletion web/scripts/ui.js
Original file line number Diff line number Diff line change
Expand Up @@ -90,12 +90,15 @@ function dragElement(dragEl, settings) {
}).observe(dragEl);

function ensureInBounds() {
if (dragEl.classList.contains("comfy-menu-manual-pos")) {
try {
newPosX = Math.min(document.body.clientWidth - dragEl.clientWidth, Math.max(0, dragEl.offsetLeft));
newPosY = Math.min(document.body.clientHeight - dragEl.clientHeight, Math.max(0, dragEl.offsetTop));

positionElement();
}
catch(exception){
// robust
}
}

function positionElement() {
Expand Down

0 comments on commit 3c48253

Please sign in to comment.