Skip to content

Commit

Permalink
Merge branch 'master' into beta
Browse files Browse the repository at this point in the history
  • Loading branch information
jn-jairo committed Nov 21, 2023
2 parents a95d15a + 2dd5b4d commit 5279398
Show file tree
Hide file tree
Showing 5 changed files with 71 additions and 4 deletions.
15 changes: 15 additions & 0 deletions comfy_extras/nodes_images.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,22 @@ def crop(self, image, width, height, x, y):
img = image[:,y:to_y, x:to_x, :]
return (img,)

class RepeatImageBatch:
    """Node that tiles one image batch along the batch dimension."""

    @classmethod
    def INPUT_TYPES(s):
        inputs = {
            "image": ("IMAGE",),
            "amount": ("INT", {"default": 1, "min": 1, "max": 64}),
        }
        return {"required": inputs}

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "repeat"

    CATEGORY = "image/batch"

    def repeat(self, image, amount):
        # Replicate along dim 0 only; height, width and channel dims are
        # left untouched, so a (B, H, W, C) input becomes (B*amount, H, W, C).
        batched = image.repeat(amount, 1, 1, 1)
        return (batched,)

# Registry consumed by the node loader: maps each node's public name to its
# implementing class. ImageCrop is defined earlier in this file.
NODE_CLASS_MAPPINGS = {
    "ImageCrop": ImageCrop,
    "RepeatImageBatch": RepeatImageBatch,
}
36 changes: 36 additions & 0 deletions comfy_extras/nodes_latent.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import comfy.utils
import torch

def reshape_latent_to(target_shape, latent):
if latent.shape[1:] != target_shape[1:]:
Expand Down Expand Up @@ -67,8 +68,43 @@ def op(self, samples, multiplier):
samples_out["samples"] = s1 * multiplier
return (samples_out,)

class LatentInterpolate:
@classmethod
def INPUT_TYPES(s):
return {"required": { "samples1": ("LATENT",),
"samples2": ("LATENT",),
"ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}),
}}

RETURN_TYPES = ("LATENT",)
FUNCTION = "op"

CATEGORY = "latent/advanced"

def op(self, samples1, samples2, ratio):
samples_out = samples1.copy()

s1 = samples1["samples"]
s2 = samples2["samples"]

s2 = reshape_latent_to(s1.shape, s2)

m1 = torch.linalg.vector_norm(s1, dim=(1))
m2 = torch.linalg.vector_norm(s2, dim=(1))

s1 = torch.nan_to_num(s1 / m1)
s2 = torch.nan_to_num(s2 / m2)

t = (s1 * ratio + s2 * (1.0 - ratio))
mt = torch.linalg.vector_norm(t, dim=(1))
st = torch.nan_to_num(t / mt)

samples_out["samples"] = st * (m1 * ratio + m2 * (1.0 - ratio))
return (samples_out,)

# Registry consumed by the node loader: maps each node's public name to its
# implementing class. LatentAdd/LatentSubtract/LatentMultiply are defined
# earlier in this file.
NODE_CLASS_MAPPINGS = {
    "LatentAdd": LatentAdd,
    "LatentSubtract": LatentSubtract,
    "LatentMultiply": LatentMultiply,
    "LatentInterpolate": LatentInterpolate,
}
17 changes: 15 additions & 2 deletions execution.py
Original file line number Diff line number Diff line change
Expand Up @@ -704,6 +704,7 @@ def validate_prompt(prompt):

return (True, None, list(good_outputs), node_errors)

MAXIMUM_HISTORY_SIZE = 10000

class PromptQueue:
def __init__(self, server):
Expand Down Expand Up @@ -736,6 +737,8 @@ def get(self):
def task_done(self, item_id, outputs):
    """Move a finished prompt out of currently_running and into history.

    Records the prompt and its outputs in self.history keyed by prompt[1]
    (the prompt id — the same key get_history looks entries up by).
    """
    with self.mutex:
        prompt = self.currently_running.pop(item_id)
        # Bound history growth: drop the oldest entry (dicts preserve
        # insertion order, so next(iter(...)) is the oldest) before
        # inserting the new one.
        if len(self.history) > MAXIMUM_HISTORY_SIZE:
            self.history.pop(next(iter(self.history)))
        self.history[prompt[1]] = { "prompt": prompt, "outputs": {} }
        for o in outputs:
            self.history[prompt[1]]["outputs"][o] = outputs[o]
Expand Down Expand Up @@ -770,10 +773,20 @@ def delete_queue_item(self, function):
return True
return False

def get_history(self, prompt_id=None):
def get_history(self, prompt_id=None, max_items=None, offset=-1):
    """Return a slice of the execution history.

    prompt_id: if given, return only that entry.
    max_items: maximum number of entries to return when prompt_id is None.
    offset: index of the first entry to include; a negative offset with
        max_items set means "the last max_items entries" (the tail).
    """
    with self.mutex:
        if prompt_id is None:
            out = {}
            i = 0
            # Default to the tail of the history when no offset was given.
            if offset < 0 and max_items is not None:
                offset = len(self.history) - max_items
            for k in self.history:
                if i >= offset:
                    # NOTE(review): entries here are returned without
                    # deepcopy, unlike the prompt_id branch below — callers
                    # receive live references into self.history.
                    out[k] = self.history[k]
                    if max_items is not None and len(out) >= max_items:
                        break
                i += 1
            return out
elif prompt_id in self.history:
return {prompt_id: copy.deepcopy(self.history[prompt_id])}
else:
Expand Down
5 changes: 4 additions & 1 deletion server.py
Original file line number Diff line number Diff line change
Expand Up @@ -431,7 +431,10 @@ async def get_object_info_node(request):

@routes.get("/history")
async def get_history(request):
    """GET /history — return the prompt execution history as JSON.

    Query params:
        max_items: optional integer cap on the number of history entries
            returned (the server returns the most recent entries).
    """
    max_items = request.rel_url.query.get("max_items", None)
    if max_items is not None:
        try:
            max_items = int(max_items)
        except ValueError:
            # Reject a malformed max_items with a 400 instead of letting
            # the ValueError propagate and surface as a 500.
            return web.Response(status=400)
    return web.json_response(self.prompt_queue.get_history(max_items=max_items))

@routes.get("/history/{prompt_id}")
async def get_history(request):
Expand Down
2 changes: 1 addition & 1 deletion web/scripts/api.js
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ class ComfyApi extends EventTarget {
*/
async getHistory() {
	try {
		// Cap the payload: server-side history can hold thousands of
		// entries; max_items=200 asks for only the most recent 200
		// (the server returns the tail when max_items is set).
		const res = await this.fetchApi("/history?max_items=200");
		return { History: Object.values(await res.json()) };
	} catch (error) {
		console.error(error);
Expand Down

0 comments on commit 5279398

Please sign in to comment.