diff --git a/comfy/k_diffusion/sampling.py b/comfy/k_diffusion/sampling.py
index ae56aee8..e9e4edcc 100644
--- a/comfy/k_diffusion/sampling.py
+++ b/comfy/k_diffusion/sampling.py
@@ -44,6 +44,17 @@ def get_sigmas_vp(n, beta_d=19.9, beta_min=0.1, eps_s=1e-3, device='cpu'):
     return append_zero(sigmas)
 
 
+def get_sigmas_laplace(n, sigma_min, sigma_max, mu=0., beta=0.5, device='cpu'):
+    """Constructs the noise schedule proposed by Tiankai et al. (2024). """
+    epsilon = 1e-5 # avoid log(0)
+    x = torch.linspace(0, 1, n, device=device)
+    clamp = lambda x: torch.clamp(x, min=sigma_min, max=sigma_max)
+    lmb = mu - beta * torch.sign(0.5-x) * torch.log(1 - 2 * torch.abs(0.5-x) + epsilon)
+    sigmas = clamp(torch.exp(lmb))
+    return sigmas
+
+
+
 def to_d(x, sigma, denoised):
     """Converts a denoiser output to a Karras ODE derivative."""
     return (x - denoised) / utils.append_dims(sigma, x.ndim)
diff --git a/comfy_extras/nodes_custom_sampler.py b/comfy_extras/nodes_custom_sampler.py
index 219975e2..c7ff9a4d 100644
--- a/comfy_extras/nodes_custom_sampler.py
+++ b/comfy_extras/nodes_custom_sampler.py
@@ -90,6 +90,27 @@ class PolyexponentialScheduler:
         sigmas = k_diffusion_sampling.get_sigmas_polyexponential(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, rho=rho)
         return (sigmas, )
 
+class LaplaceScheduler:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required":
+                    {"steps": ("INT", {"default": 20, "min": 1, "max": 10000}),
+                     "sigma_max": ("FLOAT", {"default": 14.614642, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}),
+                     "sigma_min": ("FLOAT", {"default": 0.0291675, "min": 0.0, "max": 5000.0, "step":0.01, "round": False}),
+                     "mu": ("FLOAT", {"default": 0.0, "min": -10.0, "max": 10.0, "step":0.1, "round": False}),
+                     "beta": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 10.0, "step":0.1, "round": False}),
+                    }
+               }
+    RETURN_TYPES = ("SIGMAS",)
+    CATEGORY = "sampling/custom_sampling/schedulers"
+
+    FUNCTION = "get_sigmas"
+
+    def get_sigmas(self, steps, sigma_max, sigma_min, mu, beta):
+        sigmas = k_diffusion_sampling.get_sigmas_laplace(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, mu=mu, beta=beta)
+        return (sigmas, )
+
+
 class SDTurboScheduler:
     @classmethod
     def INPUT_TYPES(s):
@@ -673,6 +694,7 @@ NODE_CLASS_MAPPINGS = {
     "KarrasScheduler": KarrasScheduler,
     "ExponentialScheduler": ExponentialScheduler,
     "PolyexponentialScheduler": PolyexponentialScheduler,
+    "LaplaceScheduler": LaplaceScheduler,
     "VPScheduler": VPScheduler,
     "BetaSamplingScheduler": BetaSamplingScheduler,
     "SDTurboScheduler": SDTurboScheduler,
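
For context on what the new schedule computes: `get_sigmas_laplace` evaluates the quantile function of a Laplace distribution (location `mu`, scale `beta`) in log-sigma space at `n` evenly spaced points, traversed so that sigma falls from `sigma_max` toward `sigma_min`, with `epsilon` keeping the log finite at the endpoints and a final clamp to `[sigma_min, sigma_max]`. The snippet below is a standalone sketch of that behaviour, assuming only PyTorch; the helper name `laplace_sigmas` and the printed check are illustrative, not part of the patch.

```python
import torch

def laplace_sigmas(n, sigma_min, sigma_max, mu=0.0, beta=0.5, device='cpu'):
    """Illustrative re-derivation of the Laplace schedule added in this patch."""
    epsilon = 1e-5  # keeps log() finite at the endpoints x=0 and x=1
    x = torch.linspace(0, 1, n, device=device)
    # Laplace quantile function over log-sigma: largest sigma at x=0, smallest at x=1
    lmb = mu - beta * torch.sign(0.5 - x) * torch.log(1 - 2 * torch.abs(0.5 - x) + epsilon)
    return torch.exp(lmb).clamp(min=sigma_min, max=sigma_max)

# Defaults exposed by the LaplaceScheduler node
sigmas = laplace_sigmas(n=20, sigma_min=0.0291675, sigma_max=14.614642)
print(sigmas[:3], sigmas[-3:])  # decreasing schedule: starts at sigma_max, ends at sigma_min
```

One design point visible in the first hunk: unlike `get_sigmas_vp` directly above it, the new function does not pass its result through `append_zero`, so the returned schedule ends at the clamped `sigma_min` rather than at 0.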