Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
82 changes: 48 additions & 34 deletions comfy_extras/nodes_ace.py
Original file line number Diff line number Diff line change
@@ -1,49 +1,63 @@
import torch
from typing_extensions import override

import comfy.model_management
import node_helpers
from comfy_api.latest import ComfyExtension, io


class TextEncodeAceStepAudio(io.ComfyNode):
    """Encode tags and lyrics with an ACE-Step CLIP model into conditioning."""

    @classmethod
    def define_schema(cls):
        return io.Schema(
            node_id="TextEncodeAceStepAudio",
            category="conditioning",
            inputs=[
                io.Clip.Input("clip"),
                io.String.Input("tags", multiline=True, dynamic_prompts=True),
                io.String.Input("lyrics", multiline=True, dynamic_prompts=True),
                io.Float.Input("lyrics_strength", default=1.0, min=0.0, max=10.0, step=0.01),
            ],
            outputs=[io.Conditioning.Output()],
        )

    @classmethod
    def execute(cls, clip, tags, lyrics, lyrics_strength) -> io.NodeOutput:
        # Tokenize tags and lyrics together, then encode on the scheduled path.
        tokens = clip.tokenize(tags, lyrics=lyrics)
        conditioning = clip.encode_from_tokens_scheduled(tokens)
        # Attach lyrics_strength to every conditioning entry so the model
        # can weight the lyrics branch at sampling time.
        conditioning = node_helpers.conditioning_set_values(conditioning, {"lyrics_strength": lyrics_strength})
        return io.NodeOutput(conditioning)


class EmptyAceStepLatentAudio(io.ComfyNode):
    """Create an empty (all-zero) ACE-Step audio latent of a given duration."""

    @classmethod
    def define_schema(cls):
        return io.Schema(
            node_id="EmptyAceStepLatentAudio",
            category="latent/audio",
            inputs=[
                io.Float.Input("seconds", default=120.0, min=1.0, max=1000.0, step=0.1),
                io.Int.Input(
                    "batch_size", default=1, min=1, max=4096, tooltip="The number of latent images in the batch."
                ),
            ],
            outputs=[io.Latent.Output()],
        )

    @classmethod
    def execute(cls, seconds, batch_size) -> io.NodeOutput:
        # Latent length for `seconds` of audio; 44100 / 512 / 8 presumably maps
        # a 44.1 kHz sample rate through the model's downscale factors —
        # TODO(review): confirm against the ACE-Step VAE.
        length = int(seconds * 44100 / 512 / 8)
        latent = torch.zeros([batch_size, 8, 16, length], device=comfy.model_management.intermediate_device())
        return io.NodeOutput({"samples": latent, "type": "audio"})


class AceExtension(ComfyExtension):
    """Registers the ACE-Step nodes with the v3 extension API."""

    @override
    async def get_node_list(self) -> list[type[io.ComfyNode]]:
        return [
            TextEncodeAceStepAudio,
            EmptyAceStepLatentAudio,
        ]


async def comfy_entrypoint() -> AceExtension:
    """Module entry point: return the extension instance ComfyUI should load."""
    return AceExtension()
90 changes: 50 additions & 40 deletions comfy_extras/nodes_advanced_samplers.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
import comfy.samplers
import comfy.utils
import torch
import numpy as np
import torch
from tqdm.auto import trange
from typing_extensions import override

import comfy.model_patcher
import comfy.samplers
import comfy.utils
from comfy.k_diffusion.sampling import to_d
from comfy_api.latest import ComfyExtension, io


@torch.no_grad()
Expand Down Expand Up @@ -33,30 +38,29 @@ def sample_lcm_upscale(model, x, sigmas, extra_args=None, callback=None, disable
return x


class SamplerLCMUpscale(io.ComfyNode):
    """Sampler node wrapping sample_lcm_upscale: LCM sampling with in-flight latent upscaling."""

    UPSCALE_METHODS = ["bislerp", "nearest-exact", "bilinear", "area", "bicubic"]

    @classmethod
    def define_schema(cls) -> io.Schema:
        return io.Schema(
            node_id="SamplerLCMUpscale",
            category="sampling/custom_sampling/samplers",
            inputs=[
                io.Float.Input("scale_ratio", default=1.0, min=0.1, max=20.0, step=0.01),
                io.Int.Input("scale_steps", default=-1, min=-1, max=1000, step=1),
                io.Combo.Input("upscale_method", options=cls.UPSCALE_METHODS),
            ],
            outputs=[io.Sampler.Output()],
        )

    @classmethod
    def execute(cls, scale_ratio, scale_steps, upscale_method) -> io.NodeOutput:
        # A negative step count is passed through as None; presumably
        # sample_lcm_upscale then derives the step count itself — see that function.
        if scale_steps < 0:
            scale_steps = None
        sampler = comfy.samplers.KSAMPLER(
            sample_lcm_upscale,
            extra_options={
                "total_upscale": scale_ratio,
                "upscale_steps": scale_steps,
                "upscale_method": upscale_method,
            },
        )
        return io.NodeOutput(sampler)

from comfy.k_diffusion.sampling import to_d
import comfy.model_patcher

@torch.no_grad()
def sample_euler_pp(model, x, sigmas, extra_args=None, callback=None, disable=None):
Expand All @@ -82,30 +86,36 @@ def post_cfg_function(args):
return x


class SamplerEulerCFGpp(io.ComfyNode):
    """Euler CFG++ sampler node, selectable between the built-in and an alternative implementation."""

    @classmethod
    def define_schema(cls) -> io.Schema:
        return io.Schema(
            node_id="SamplerEulerCFGpp",
            display_name="SamplerEulerCFG++",
            category="_for_testing",  # "sampling/custom_sampling/samplers"
            inputs=[
                io.Combo.Input("version", options=["regular", "alternative"]),
            ],
            outputs=[io.Sampler.Output()],
            is_experimental=True,
        )

    @classmethod
    def execute(cls, version) -> io.NodeOutput:
        if version == "alternative":
            # Use this module's sample_euler_pp implementation.
            sampler = comfy.samplers.KSAMPLER(sample_euler_pp)
        else:
            # Use the built-in euler_cfg_pp sampler.
            sampler = comfy.samplers.ksampler("euler_cfg_pp")
        return io.NodeOutput(sampler)


class AdvancedSamplersExtension(ComfyExtension):
    """Registers the advanced sampler nodes with the v3 extension API."""

    @override
    async def get_node_list(self) -> list[type[io.ComfyNode]]:
        return [
            SamplerLCMUpscale,
            SamplerEulerCFGpp,
        ]


async def comfy_entrypoint() -> AdvancedSamplersExtension:
    """Module entry point: return the extension instance ComfyUI should load."""
    return AdvancedSamplersExtension()
74 changes: 52 additions & 22 deletions comfy_extras/nodes_apg.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,54 @@
import torch
from typing_extensions import override

from comfy_api.latest import ComfyExtension, io


def project(v0, v1):
    """Split v0 into its components parallel and orthogonal to v1.

    v1 is first normalized to unit length over its last three dimensions,
    so the projection is taken per-sample across those axes.
    Returns the tuple (parallel, orthogonal).
    """
    direction = torch.nn.functional.normalize(v1, dim=[-1, -2, -3])
    parallel = (v0 * direction).sum(dim=[-1, -2, -3], keepdim=True) * direction
    return parallel, v0 - parallel

class APG:
class APG(io.ComfyNode):
@classmethod
def INPUT_TYPES(s):
return {
"required": {
"model": ("MODEL",),
"eta": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01, "tooltip": "Controls the scale of the parallel guidance vector. Default CFG behavior at a setting of 1."}),
"norm_threshold": ("FLOAT", {"default": 5.0, "min": 0.0, "max": 50.0, "step": 0.1, "tooltip": "Normalize guidance vector to this value, normalization disable at a setting of 0."}),
"momentum": ("FLOAT", {"default": 0.0, "min": -5.0, "max": 1.0, "step": 0.01, "tooltip":"Controls a running average of guidance during diffusion, disabled at a setting of 0."}),
}
}
RETURN_TYPES = ("MODEL",)
FUNCTION = "patch"
CATEGORY = "sampling/custom_sampling"

def patch(self, model, eta, norm_threshold, momentum):
def define_schema(cls) -> io.Schema:
return io.Schema(
node_id="APG",
display_name="Adaptive Projected Guidance",
category="sampling/custom_sampling",
inputs=[
io.Model.Input("model"),
io.Float.Input(
"eta",
default=1.0,
min=-10.0,
max=10.0,
step=0.01,
tooltip="Controls the scale of the parallel guidance vector. Default CFG behavior at a setting of 1.",
),
io.Float.Input(
"norm_threshold",
default=5.0,
min=0.0,
max=50.0,
step=0.1,
tooltip="Normalize guidance vector to this value, normalization disable at a setting of 0.",
),
io.Float.Input(
"momentum",
default=0.0,
min=-5.0,
max=1.0,
step=0.01,
tooltip="Controls a running average of guidance during diffusion, disabled at a setting of 0.",
),
],
outputs=[io.Model.Output()],
)

@classmethod
def execute(cls, model, eta, norm_threshold, momentum) -> io.NodeOutput:
running_avg = 0
prev_sigma = None

Expand Down Expand Up @@ -65,12 +92,15 @@ def pre_cfg_function(args):

m = model.clone()
m.set_model_sampler_pre_cfg_function(pre_cfg_function)
return (m,)
return io.NodeOutput(m)


class ApgExtension(ComfyExtension):
    """Registers the APG node with the v3 extension API."""

    @override
    async def get_node_list(self) -> list[type[io.ComfyNode]]:
        return [
            APG,
        ]


async def comfy_entrypoint() -> ApgExtension:
    """Module entry point: return the extension instance ComfyUI should load."""
    return ApgExtension()
Loading
Loading