Skip to content

Commit

Permalink
Merge pull request #280 from Haidra-Org/main
Browse files Browse the repository at this point in the history
fix: hires now uses more optimal steps for second pass; feat: hires on layer diffuse; feat: comfyui `2dc84d14`
  • Loading branch information
tazlin authored Jul 8, 2024
2 parents 0d9ec71 + 77f5b4d commit f9b6bad
Show file tree
Hide file tree
Showing 24 changed files with 698 additions and 277 deletions.
18 changes: 8 additions & 10 deletions hordelib/comfy_horde.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,14 +213,7 @@ def do_comfy_import(

# comfy.model_management.unet_offload_device = _unet_offload_device_hijack

total_vram = get_torch_total_vram_mb()
total_ram = psutil.virtual_memory().total / (1024 * 1024)
free_ram = psutil.virtual_memory().available / (1024 * 1024)

free_vram = get_torch_free_vram_mb()

logger.debug(f"Total VRAM {total_vram:0.0f} MB, Total System RAM {total_ram:0.0f} MB")
logger.debug(f"Free VRAM {free_vram:0.0f} MB, Free System RAM {free_ram:0.0f} MB")
log_free_ram()
output_collector.replay()


Expand Down Expand Up @@ -308,6 +301,11 @@ def get_torch_free_vram_mb():
return round(_comfy_get_free_memory() / (1024 * 1024))


def log_free_ram():
    """Emit debug-level log lines with the currently free VRAM and system RAM (in MB)."""
    free_vram_mb = get_torch_free_vram_mb()
    free_ram_mb = psutil.virtual_memory().available / (1024 * 1024)
    logger.debug(f"Free VRAM: {free_vram_mb:0.0f} MB")
    logger.debug(f"Free RAM: {free_ram_mb:0.0f} MB")


class Comfy_Horde:
"""Handles horde-specific behavior against ComfyUI."""

Expand Down Expand Up @@ -446,7 +444,7 @@ def _this_dir(self, filename: str, subdir="") -> str:

def _load_custom_nodes(self) -> None:
"""Force ComfyUI to load its normal custom nodes and the horde custom nodes."""
_comfy_nodes.init_custom_nodes()
_comfy_nodes.init_extra_nodes(init_custom_nodes=True)

def _get_executor(self):
"""Return the ComfyUI PromptExecutor object."""
Expand Down Expand Up @@ -744,7 +742,7 @@ def _run_pipeline(
# if time.time() - self._gc_timer > Comfy_Horde.GC_TIME:
# self._gc_timer = time.time()
# garbage_collect()

log_free_ram()
return self.images

# Run a pipeline that returns an image in pixel space
Expand Down
2 changes: 1 addition & 1 deletion hordelib/consts.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

from hordelib.config_path import get_hordelib_path

COMFYUI_VERSION = "16eabdf70dbdb64dc4822908f0fe455c56d11ec3"
COMFYUI_VERSION = "2dc84d14447782683862616eaf8c19c0c1feacf3"
"""The exact version of ComfyUI version to load."""

REMOTE_PROXY = ""
Expand Down
66 changes: 59 additions & 7 deletions hordelib/horde.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from collections.abc import Callable
from copy import deepcopy
from enum import Enum, auto
from types import FunctionType

from horde_sdk.ai_horde_api.apimodels import ImageGenerateJobPopResponse
from horde_sdk.ai_horde_api.apimodels.base import (
Expand Down Expand Up @@ -77,6 +78,17 @@ def __init__(
self.faults = faults


def _calc_upscale_sampler_steps(payload):
    """Return the step count for the hires-fix second (upscale) pass.

    Scales the primary pass step count (``ddim_steps``) down as the hires-fix
    denoising strength rises: the stronger the denoise, the fewer second-pass
    steps are needed. Clamped to a minimum of 3 steps because
    ``0.9 - hires_fix_denoising_strength`` can be zero or negative for
    denoising strengths >= 0.9, which would otherwise yield 0 or negative steps.
    """
    upscale_steps = round(payload["ddim_steps"] * (0.9 - payload["hires_fix_denoising_strength"]))
    # max() replaces the manual `if < 3` clamp and also guards against
    # negative results from high denoising strengths.
    upscale_steps = max(upscale_steps, 3)

    logger.debug(f"Upscale steps calculated as {upscale_steps}")
    return upscale_steps


class HordeLib:
_instance: HordeLib | None = None
_initialised = False
Expand Down Expand Up @@ -227,7 +239,7 @@ class HordeLib:
"upscale_sampler.denoise": "hires_fix_denoising_strength",
"upscale_sampler.seed": "seed",
"upscale_sampler.cfg": "cfg_scale",
"upscale_sampler.steps": "ddim_steps",
"upscale_sampler.steps": _calc_upscale_sampler_steps,
"upscale_sampler.sampler_name": "sampler_name",
"controlnet_apply.strength": "control_strength",
"controlnet_model_loader.control_net_name": "control_type",
Expand All @@ -243,6 +255,8 @@ class HordeLib:
"sampler_stage_c.denoise": "denoising_strength",
"sampler_stage_b.seed": "seed",
"sampler_stage_c.seed": "seed",
"sampler_stage_b.steps": "ddim_steps*0.33",
"sampler_stage_c.steps": "ddim_steps*0.67",
"model_loader_stage_c.ckpt_name": "stable_cascade_stage_c",
"model_loader_stage_c.model_name": "stable_cascade_stage_c",
"model_loader_stage_c.horde_model_name": "model_name",
Expand All @@ -251,8 +265,10 @@ class HordeLib:
"model_loader_stage_b.horde_model_name": "model_name",
# Stable Cascade 2pass
"2pass_sampler_stage_c.sampler_name": "sampler_name",
"2pass_sampler_stage_c.steps": "ddim_steps*0.67",
"2pass_sampler_stage_c.denoise": "hires_fix_denoising_strength",
"2pass_sampler_stage_b.sampler_name": "sampler_name",
"2pass_sampler_stage_b.steps": "ddim_steps*0.33",
# QR Codes
"sampler_bg.sampler_name": "sampler_name",
"sampler_bg.cfg": "cfg_scale",
Expand Down Expand Up @@ -519,8 +535,15 @@ def _apply_aihorde_compatibility_hacks(self, payload: dict) -> tuple[dict, list[

# Turn off hires fix if we're not generating a hires image, or if the params are just confused
try:
if "hires_fix" in payload and (payload["width"] <= 512 or payload["height"] <= 512):
payload["hires_fix"] = False
if "hires_fix" in payload:
if SharedModelManager.manager.compvis.model_reference[model].get(
"baseline",
) == "stable diffusion 1" and (payload["width"] <= 512 or payload["height"] <= 512):
payload["hires_fix"] = False
elif SharedModelManager.manager.compvis.model_reference[model].get(
"baseline",
) == "stable_diffusion_xl" and (payload["width"] <= 1024 or payload["height"] <= 1024):
payload["hires_fix"] = False
except (TypeError, KeyError):
payload["hires_fix"] = False

Expand Down Expand Up @@ -792,8 +815,18 @@ def _final_pipeline_adjustments(self, payload, pipeline_data) -> tuple[dict, lis
# Translate the payload parameters into pipeline parameters
pipeline_params = {}
for newkey, key in HordeLib.PAYLOAD_TO_PIPELINE_PARAMETER_MAPPING.items():
    # A mapping value is one of: a callable computed from the whole payload,
    # a "payload_key*multiplier" string, or a plain payload key.
    if isinstance(key, FunctionType):
        pipeline_params[newkey] = key(payload)
        continue
    multiplier = None
    # We allow a multiplier in the param, so that I can adjust easily the
    # values for steps on things like stable cascade
    if "*" in key:
        key, multiplier = key.split("*", 1)
    # BUG FIX: this must be a separate `if`, not an `elif` chained after the
    # "*" check — otherwise multiplier-style mappings (e.g. "ddim_steps*0.33")
    # split the key but never reach the payload lookup, so those pipeline
    # params are silently dropped and `if multiplier:` below is dead code.
    if key in payload:
        if multiplier:
            pipeline_params[newkey] = round(payload.get(key) * float(multiplier))
        else:
            pipeline_params[newkey] = payload.get(key)
    else:
        logger.error(f"Parameter {key} not found")
# We inject these parameters to ensure the HordeCheckpointLoader knows what file to load, if necessary
Expand Down Expand Up @@ -827,8 +860,12 @@ def _final_pipeline_adjustments(self, payload, pipeline_data) -> tuple[dict, lis
original_height = pipeline_params.get("empty_latent_image.height")

if original_width is None or original_height is None:
logger.error("empty_latent_image.width or empty_latent_image.height not found. Using 512x512.")
original_width, original_height = (512, 512)
if model_details and model_details.get("baseline") == "stable diffusion 1":
logger.error("empty_latent_image.width or empty_latent_image.height not found. Using 512x512.")
original_width, original_height = (512, 512)
else:
logger.error("empty_latent_image.width or empty_latent_image.height not found. Using 1024x1024.")
original_width, original_height = (1024, 1024)

new_width, new_height = (None, None)

Expand Down Expand Up @@ -1041,6 +1078,8 @@ def _final_pipeline_adjustments(self, payload, pipeline_data) -> tuple[dict, lis
self.generator.reconnect_input(pipeline_data, "layer_diffuse_apply.model", "model_loader")
self.generator.reconnect_input(pipeline_data, "output_image.images", "layer_diffuse_decode_rgba")
self.generator.reconnect_input(pipeline_data, "layer_diffuse_decode_rgba.images", "vae_decode")
if payload.get("hires_fix") is True:
self.generator.reconnect_input(pipeline_data, "upscale_sampler.model", "layer_diffuse_apply")
if model_details.get("baseline") == "stable diffusion 1":
pipeline_params["layer_diffuse_apply.config"] = "SD15, Attention Injection, attn_sharing"
pipeline_params["layer_diffuse_decode_rgba.sd_version"] = "SD15"
Expand Down Expand Up @@ -1489,6 +1528,11 @@ def basic_inference_rawpng(self, payload: dict) -> list[io.BytesIO]:

def image_upscale(self, payload) -> ResultingImageReturn:
logger.debug("image_upscale called")

from hordelib.comfy_horde import log_free_ram

log_free_ram()

# AIHorde hacks to payload
payload, compatibility_faults = self._apply_aihorde_compatibility_hacks(payload)
# Remember if we were passed width and height, we wouldn't normally be passed width and height
Expand Down Expand Up @@ -1522,10 +1566,16 @@ def image_upscale(self, payload) -> ResultingImageReturn:
if not isinstance(image, Image.Image):
raise RuntimeError(f"Expected a PIL.Image.Image but got {type(image)}")

log_free_ram()
return ResultingImageReturn(image=image, rawpng=rawpng, faults=compatibility_faults + final_adjustment_faults)

def image_facefix(self, payload) -> ResultingImageReturn:
logger.debug("image_facefix called")

from hordelib.comfy_horde import log_free_ram

log_free_ram()

# AIHorde hacks to payload
payload, compatibility_faults = self._apply_aihorde_compatibility_hacks(payload)
# Check payload types/values and normalise it's format
Expand All @@ -1547,4 +1597,6 @@ def image_facefix(self, payload) -> ResultingImageReturn:
if not isinstance(image, Image.Image):
raise RuntimeError(f"Expected a PIL.Image.Image but got {type(image)}")

log_free_ram()

return ResultingImageReturn(image=image, rawpng=rawpng, faults=compatibility_faults + final_adjustment_faults)
7 changes: 6 additions & 1 deletion hordelib/nodes/node_controlnet_model_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,21 @@ def INPUT_TYPES(s):
CATEGORY = "loaders"

def load_controlnet(self, model, control_net_name, model_manager):
    """Merge the named controlnet into `model` via the shared controlnet
    model manager, logging free RAM before and after the merge."""
    from hordelib.comfy_horde import log_free_ram

    logger.debug(f"Loading controlnet {control_net_name} through our custom node")
    log_free_ram()

    # Guard clause: every link in the manager chain must be present.
    manager_available = (
        model_manager
        and model_manager.manager
        and model_manager.manager.controlnet
    )
    if not manager_available:
        logger.error("controlnet model_manager appears to be missing!")
        raise RuntimeError  # XXX better guarantees need to be made

    merged = model_manager.manager.controlnet.merge_controlnet(control_net_name, model)
    log_free_ram()
    return merged


NODE_CLASS_MAPPINGS = {"HordeDiffControlNetLoader": HordeDiffControlNetLoader}
Expand Down
8 changes: 8 additions & 0 deletions hordelib/nodes/node_lora_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,18 @@ def INPUT_TYPES(s):
CATEGORY = "loaders"

def load_lora(self, model, clip, lora_name, strength_model, strength_clip):
from hordelib.comfy_horde import log_free_ram

log_free_ram()

_test_exception = os.getenv("FAILURE_TEST", False)
if _test_exception:
raise Exception("This tests exceptions being thrown from within the pipeline")

logger.debug(f"Loading lora {lora_name} through our custom node")

if strength_model == 0 and strength_clip == 0:
logger.debug("Strengths are 0, skipping lora loading")
return (model, clip)

if lora_name is None or lora_name == "" or lora_name == "None":
Expand Down Expand Up @@ -67,6 +74,7 @@ def load_lora(self, model, clip, lora_name, strength_model, strength_clip):
self.loaded_lora = (lora_path, lora)

model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip)
log_free_ram()
return (model_lora, clip_lora)


Expand Down
5 changes: 4 additions & 1 deletion hordelib/nodes/node_model_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from loguru import logger

from hordelib.shared_model_manager import SharedModelManager
from hordelib.comfy_horde import log_free_ram


# Don't let the name fool you, this class is trying to load all the files that will be necessary
Expand Down Expand Up @@ -44,6 +45,7 @@ def load_checkpoint(
output_clip=True,
preloading=False,
):
log_free_ram()
if file_type is not None:
logger.debug(f"Loading model {horde_model_name}:{file_type}")
else:
Expand Down Expand Up @@ -77,7 +79,7 @@ def load_checkpoint(
make_regular_vae(same_loaded_model[0][2])

logger.debug("Model was previously loaded, returning it.")

log_free_ram()
return same_loaded_model[0]

if not ckpt_name:
Expand Down Expand Up @@ -133,6 +135,7 @@ def load_checkpoint(
result[0].model.apply(make_regular)
make_regular_vae(result[2])

log_free_ram()
return result


Expand Down
4 changes: 4 additions & 0 deletions hordelib/nodes/node_upscale_model_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,15 @@ def INPUT_TYPES(s):
CATEGORY = "loaders"

def load_model(self, model_name):
    """Load an upscale model checkpoint from disk and return the eval-mode
    model as a 1-tuple, logging free RAM before and after loading."""
    from hordelib.comfy_horde import log_free_ram

    log_free_ram()
    model_path = folder_paths.get_full_path("upscale_models", model_name)
    state_dict = comfy.utils.load_torch_file(model_path, safe_load=True)
    # Some checkpoints wrap every key with a "module." prefix; strip it so the
    # keys match what the model loader expects.
    if "module.layers.0.residual_group.blocks.0.norm1.weight" in state_dict:
        state_dict = comfy.utils.state_dict_prefix_replace(state_dict, {"module.": ""})
    upscale_model = model_loading.load_state_dict(state_dict).eval()
    log_free_ram()
    return (upscale_model,)


Expand Down
Loading

0 comments on commit f9b6bad

Please sign in to comment.