Skip to content

Commit

Permalink
Merge branch 'master' into beta
Browse files Browse the repository at this point in the history
  • Loading branch information
jn-jairo committed Nov 1, 2023
2 parents 9a8b1bd + e73ec8c commit 9f13db8
Show file tree
Hide file tree
Showing 12 changed files with 171 additions and 2,254 deletions.
14 changes: 13 additions & 1 deletion comfy/controlnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,7 @@ def __init__(self, control_model, global_average_pooling=False, device=None):
self.control_model = control_model
self.control_model_wrapped = comfy.model_patcher.ModelPatcher(self.control_model, load_device=comfy.model_management.get_torch_device(), offload_device=comfy.model_management.unet_offload_device())
self.global_average_pooling = global_average_pooling
self.model_sampling_current = None

def get_control(self, x_noisy, t, cond, batched_number):
control_prev = None
Expand Down Expand Up @@ -159,7 +160,10 @@ def get_control(self, x_noisy, t, cond, batched_number):
y = cond.get('y', None)
if y is not None:
y = y.to(self.control_model.dtype)
control = self.control_model(x=x_noisy.to(self.control_model.dtype), hint=self.cond_hint, timesteps=t, context=context.to(self.control_model.dtype), y=y)
timestep = self.model_sampling_current.timestep(t)
x_noisy = self.model_sampling_current.calculate_input(t, x_noisy)

control = self.control_model(x=x_noisy.to(self.control_model.dtype), hint=self.cond_hint, timesteps=timestep.float(), context=context.to(self.control_model.dtype), y=y)
return self.control_merge(None, control, control_prev, output_dtype)

def copy(self):
Expand All @@ -172,6 +176,14 @@ def get_models(self):
out.append(self.control_model_wrapped)
return out

def pre_run(self, model, percent_to_timestep_function):
    """Hook invoked before a sampling run begins.

    Delegates to the base-class pre_run, then caches the model's
    ``model_sampling`` object on the instance.  The cached object is used
    while sampling to convert sigma values to timesteps and to rescale the
    noisy latent before it is fed to the control model (see the
    ``self.model_sampling_current.timestep(t)`` / ``calculate_input`` calls
    in ``get_control``).  The cache is cleared again in ``cleanup``.
    """
    super().pre_run(model, percent_to_timestep_function)
    self.model_sampling_current = model.model_sampling

def cleanup(self):
    """Hook invoked after a sampling run finishes.

    Drops the cached ``model_sampling`` reference (set in ``pre_run``) so
    it does not keep the sampled model's objects alive past the run, then
    delegates to the base-class cleanup.
    """
    self.model_sampling_current = None
    super().cleanup()

class ControlLoraOps:
class Linear(torch.nn.Module):
def __init__(self, in_features: int, out_features: int, bias: bool = True,
Expand Down
8 changes: 7 additions & 1 deletion comfy/extra_samplers/uni_pc.py
Original file line number Diff line number Diff line change
Expand Up @@ -852,6 +852,12 @@ def marginal_lambda(self, t):
log_std = 0.5 * torch.log(1. - torch.exp(2. * log_mean_coeff))
return log_mean_coeff - log_std

def predict_eps_sigma(model, input, sigma_in, **kwargs):
    """Wrap a denoising model so it returns an epsilon (noise) prediction.

    The input latent is rescaled by sqrt(sigma^2 + 1) before being passed
    to ``model``; the model's denoised output is then converted to a noise
    estimate via ``(x_scaled - denoised) / sigma``.

    Args:
        model: callable ``model(x, sigma, **kwargs)`` returning a denoised
            tensor the same shape as ``x``.
        input: latent tensor; batch dimension first.
        sigma_in: 1-D tensor of per-sample sigmas (one per batch element).
        **kwargs: forwarded unchanged to ``model``.

    Returns:
        Tensor of the same shape as ``input`` holding the eps prediction.
    """
    # Reshape sigmas from (B,) to (B, 1, 1, ...) so they broadcast over
    # every non-batch dimension of the input.
    broadcast_shape = sigma_in.shape[:1] + (1,) * (input.ndim - 1)
    sigma = sigma_in.view(broadcast_shape)
    scaled = input * (sigma ** 2 + 1.0) ** 0.5
    denoised = model(scaled, sigma_in, **kwargs)
    return (scaled - denoised) / sigma


def sample_unipc(model, noise, image, sigmas, sampling_function, max_denoise, extra_args=None, callback=None, disable=False, noise_mask=None, variant='bh1'):
timesteps = sigmas.clone()
if sigmas[-1] == 0:
Expand All @@ -874,7 +880,7 @@ def sample_unipc(model, noise, image, sigmas, sampling_function, max_denoise, ex
model_type = "noise"

model_fn = model_wrapper(
model.predict_eps_sigma,
lambda input, sigma, **kwargs: predict_eps_sigma(model, input, sigma, **kwargs),
ns,
model_type=model_type,
guidance_type="uncond",
Expand Down
194 changes: 0 additions & 194 deletions comfy/k_diffusion/external.py

This file was deleted.

Empty file.
Loading

0 comments on commit 9f13db8

Please sign in to comment.