From 82ecb02c1ed29455047dcaa3839497b2cfa92852 Mon Sep 17 00:00:00 2001
From: catboxanon <122327233+catboxanon@users.noreply.github.com>
Date: Sun, 29 Dec 2024 20:06:49 -0500
Subject: [PATCH 1/4] Remove duplicate calls to INPUT_TYPES (#6249)

---
 comfy_execution/graph.py | 4 ++--
 execution.py             | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/comfy_execution/graph.py b/comfy_execution/graph.py
index cc6caffd..59b42b74 100644
--- a/comfy_execution/graph.py
+++ b/comfy_execution/graph.py
@@ -54,8 +54,8 @@ class DynamicPrompt:
     def get_original_prompt(self):
         return self.original_prompt
 
-def get_input_info(class_def, input_name):
-    valid_inputs = class_def.INPUT_TYPES()
+def get_input_info(class_def, input_name, valid_inputs=None):
+    valid_inputs = valid_inputs or class_def.INPUT_TYPES()
     input_info = None
     input_category = None
     if "required" in valid_inputs and input_name in valid_inputs["required"]:
diff --git a/execution.py b/execution.py
index 1c92830d..b18dc4b8 100644
--- a/execution.py
+++ b/execution.py
@@ -93,7 +93,7 @@ def get_input_data(inputs, class_def, unique_id, outputs=None, dynprompt=None, e
     missing_keys = {}
     for x in inputs:
         input_data = inputs[x]
-        input_type, input_category, input_info = get_input_info(class_def, x)
+        input_type, input_category, input_info = get_input_info(class_def, x, valid_inputs)
         def mark_missing():
             missing_keys[x] = True
             input_data_all[x] = (None,)
@@ -555,7 +555,7 @@ def validate_inputs(prompt, item, validated):
     received_types = {}
 
     for x in valid_inputs:
-        type_input, input_category, extra_info = get_input_info(obj_class, x)
+        type_input, input_category, extra_info = get_input_info(obj_class, x, class_inputs)
         assert extra_info is not None
         if x not in inputs:
            if input_category == "required":
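
Note (illustration, not part of the patch series): the sketch below shows the pattern this change enables. INPUT_TYPES() is evaluated once per node, and the resulting dict is passed into get_input_info for every input lookup instead of being re-fetched per input. ExampleNode and the simplified get_input_info body are hypothetical stand-ins rather than the actual ComfyUI definitions; only the new valid_inputs parameter and its fallback mirror the patch.

    # Hypothetical stand-ins used only to illustrate the caching pattern above.
    class ExampleNode:
        call_count = 0

        @classmethod
        def INPUT_TYPES(cls):
            cls.call_count += 1  # track how often the (potentially expensive) method runs
            return {
                "required": {"image": ("IMAGE", {})},
                "optional": {"strength": ("FLOAT", {"default": 1.0})},
            }

    def get_input_info(class_def, input_name, valid_inputs=None):
        # Reuse a pre-fetched INPUT_TYPES() result when the caller already has one.
        valid_inputs = valid_inputs or class_def.INPUT_TYPES()
        for category in ("required", "optional", "hidden"):
            if input_name in valid_inputs.get(category, {}):
                info = valid_inputs[category][input_name]
                return info[0], category, info[1] if len(info) > 1 else None
        return None, None, None

    valid_inputs = ExampleNode.INPUT_TYPES()   # fetched once per node...
    for name in ("image", "strength"):
        print(get_input_info(ExampleNode, name, valid_inputs))  # ...reused per input
    print(ExampleNode.call_count)              # 1, not 1 + number of inputs

Callers that do not have the dict at hand can still omit the argument, since valid_inputs defaults to None and falls back to class_def.INPUT_TYPES().
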
From 3507870535f9049811fec428088053d304b2319e Mon Sep 17 00:00:00 2001
From: Jedrzej Kosinski
Date: Mon, 30 Dec 2024 02:42:49 -0600
Subject: [PATCH 2/4] Add 'sigmas' to transformer_options so that downstream
 code can know about the full scope of current sampling run, fix Hook
 Keyframes' guarantee_steps=1 inconsistent behavior with sampling split across
 different Sampling nodes/sampling runs by referencing 'sigmas' (#6273)

---
 comfy/hooks.py         | 21 +++++++++++++++++----
 comfy/model_patcher.py |  5 +++--
 comfy/samplers.py      | 10 ++++++----
 3 files changed, 26 insertions(+), 10 deletions(-)

diff --git a/comfy/hooks.py b/comfy/hooks.py
index cf33598a..79a7090b 100644
--- a/comfy/hooks.py
+++ b/comfy/hooks.py
@@ -366,9 +366,15 @@ class HookKeyframe:
             self.start_t = 999999999.9
         self.guarantee_steps = guarantee_steps
 
+    def get_effective_guarantee_steps(self, max_sigma: torch.Tensor):
+        '''If keyframe starts before current sampling range (max_sigma), treat as 0.'''
+        if self.start_t > max_sigma:
+            return 0
+        return self.guarantee_steps
+
     def clone(self):
         c = HookKeyframe(strength=self.strength,
-                     start_percent=self.start_percent, guarantee_steps=self.guarantee_steps)
+                         start_percent=self.start_percent, guarantee_steps=self.guarantee_steps)
         c.start_t = self.start_t
         return c
 
@@ -408,6 +414,12 @@ class HookKeyframeGroup:
         else:
             self._current_keyframe = None
 
+    def has_guarantee_steps(self):
+        for kf in self.keyframes:
+            if kf.guarantee_steps > 0:
+                return True
+        return False
+
     def has_index(self, index: int):
         return index >= 0 and index < len(self.keyframes)
 
@@ -425,15 +437,16 @@ class HookKeyframeGroup:
         for keyframe in self.keyframes:
             keyframe.start_t = model.model_sampling.percent_to_sigma(keyframe.start_percent)
 
-    def prepare_current_keyframe(self, curr_t: float) -> bool:
+    def prepare_current_keyframe(self, curr_t: float, transformer_options: dict[str, torch.Tensor]) -> bool:
         if self.is_empty():
             return False
         if curr_t == self._curr_t:
             return False
+        max_sigma = torch.max(transformer_options["sigmas"])
         prev_index = self._current_index
         prev_strength = self._current_strength
         # if met guaranteed steps, look for next keyframe in case need to switch
-        if self._current_used_steps >= self._current_keyframe.guarantee_steps:
+        if self._current_used_steps >= self._current_keyframe.get_effective_guarantee_steps(max_sigma):
             # if has next index, loop through and see if need to switch
             if self.has_index(self._current_index+1):
                 for i in range(self._current_index+1, len(self.keyframes)):
@@ -446,7 +459,7 @@ class HookKeyframeGroup:
                         self._current_keyframe = eval_c
                         self._current_used_steps = 0
                         # if guarantee_steps greater than zero, stop searching for other keyframes
-                        if self._current_keyframe.guarantee_steps > 0:
+                        if self._current_keyframe.get_effective_guarantee_steps(max_sigma) > 0:
                             break
                     # if eval_c is outside the percent range, stop looking further
                     else: break
diff --git a/comfy/model_patcher.py b/comfy/model_patcher.py
index d89d9a6a..4597ce11 100644
--- a/comfy/model_patcher.py
+++ b/comfy/model_patcher.py
@@ -919,11 +919,12 @@ class ModelPatcher:
     def set_hook_mode(self, hook_mode: comfy.hooks.EnumHookMode):
         self.hook_mode = hook_mode
 
-    def prepare_hook_patches_current_keyframe(self, t: torch.Tensor, hook_group: comfy.hooks.HookGroup):
+    def prepare_hook_patches_current_keyframe(self, t: torch.Tensor, hook_group: comfy.hooks.HookGroup, model_options: dict[str]):
         curr_t = t[0]
         reset_current_hooks = False
+        transformer_options = model_options.get("transformer_options", {})
         for hook in hook_group.hooks:
-            changed = hook.hook_keyframe.prepare_current_keyframe(curr_t=curr_t)
+            changed = hook.hook_keyframe.prepare_current_keyframe(curr_t=curr_t, transformer_options=transformer_options)
             # if keyframe changed, remove any cached HookGroups that contain hook with the same hook_ref;
             # this will cause the weights to be recalculated when sampling
             if changed:
diff --git a/comfy/samplers.py b/comfy/samplers.py
index 27686722..6a386511 100644
--- a/comfy/samplers.py
+++ b/comfy/samplers.py
@@ -144,7 +144,7 @@ def cond_cat(c_list):
 
     return out
 
-def finalize_default_conds(model: 'BaseModel', hooked_to_run: dict[comfy.hooks.HookGroup,list[tuple[tuple,int]]], default_conds: list[list[dict]], x_in, timestep):
+def finalize_default_conds(model: 'BaseModel', hooked_to_run: dict[comfy.hooks.HookGroup,list[tuple[tuple,int]]], default_conds: list[list[dict]], x_in, timestep, model_options):
     # need to figure out remaining unmasked area for conds
     default_mults = []
     for _ in default_conds:
@@ -183,7 +183,7 @@ def finalize_default_conds(model: 'BaseModel', hooked_to_run: dict[comfy.hooks.H
             # replace p's mult with calculated mult
             p = p._replace(mult=mult)
             if p.hooks is not None:
-                model.current_patcher.prepare_hook_patches_current_keyframe(timestep, p.hooks)
+                model.current_patcher.prepare_hook_patches_current_keyframe(timestep, p.hooks, model_options)
             hooked_to_run.setdefault(p.hooks, list())
             hooked_to_run[p.hooks] += [(p, i)]
 
@@ -218,7 +218,7 @@ def _calc_cond_batch(model: 'BaseModel', conds: list[list[dict]], x_in: torch.Te
             if p is None:
                 continue
             if p.hooks is not None:
-                model.current_patcher.prepare_hook_patches_current_keyframe(timestep, p.hooks)
+                model.current_patcher.prepare_hook_patches_current_keyframe(timestep, p.hooks, model_options)
             hooked_to_run.setdefault(p.hooks, list())
             hooked_to_run[p.hooks] += [(p, i)]
         default_conds.append(default_c)
@@ -840,7 +840,9 @@ class CFGGuider:
 
         self.conds = process_conds(self.inner_model, noise, self.conds, device, latent_image, denoise_mask, seed)
 
-        extra_args = {"model_options": comfy.model_patcher.create_model_options_clone(self.model_options), "seed": seed}
+        extra_model_options = comfy.model_patcher.create_model_options_clone(self.model_options)
+        extra_model_options.setdefault("transformer_options", {})["sigmas"] = sigmas
+        extra_args = {"model_options": extra_model_options, "seed": seed}
 
         executor = comfy.patcher_extension.WrapperExecutor.new_class_executor(
             sampler.sample,
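
Note (illustration, not part of the patch series): a minimal sketch of how downstream code can use the new "sigmas" entry in transformer_options. The sampler now stores the full sigma schedule of the current run there, so a hook keyframe whose start_t lies above the run's maximum sigma can be treated as if its guaranteed steps no longer apply. SimpleKeyframe below is a hypothetical stand-in for HookKeyframe, and the sigma values are made up for the example; the real logic is the get_effective_guarantee_steps method added to comfy/hooks.py above.

    import torch

    # Hypothetical stand-in mirroring HookKeyframe.get_effective_guarantee_steps above.
    class SimpleKeyframe:
        def __init__(self, start_t, guarantee_steps):
            self.start_t = start_t
            self.guarantee_steps = guarantee_steps

        def get_effective_guarantee_steps(self, max_sigma):
            # A keyframe that starts above the current run's max sigma effectively
            # began in an earlier sampling run, so no extra steps are guaranteed.
            if self.start_t > max_sigma:
                return 0
            return self.guarantee_steps

    # The sampler now exposes the sigma schedule of the current run like this:
    transformer_options = {"sigmas": torch.tensor([7.0, 3.5, 1.2, 0.3, 0.0])}
    max_sigma = torch.max(transformer_options["sigmas"])

    earlier_run_kf = SimpleKeyframe(start_t=14.0, guarantee_steps=1)  # started before this run
    current_run_kf = SimpleKeyframe(start_t=3.0, guarantee_steps=1)   # starts inside this run
    print(earlier_run_kf.get_effective_guarantee_steps(max_sigma))  # 0
    print(current_run_kf.get_effective_guarantee_steps(max_sigma))  # 1

Per the commit message, this is what makes guarantee_steps=1 behave consistently when one schedule is split across multiple sampling nodes: only the run that actually contains the keyframe's start point enforces the guaranteed step.
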
From d9b7cfac7e05eabb5c81898e70179cdab195f47f Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Mon, 30 Dec 2024 04:14:59 -0500
Subject: [PATCH 3/4] Fix and enforce new lines at the end of files.

---
 api_server/services/file_service.py                          | 2 +-
 api_server/utils/file_operations.py                          | 2 +-
 app/app_settings.py                                          | 2 +-
 comfy/ldm/util.py                                             | 2 +-
 comfy_extras/nodes_load_3d.py                                 | 2 +-
 comfy_extras/nodes_morphology.py                              | 2 +-
 comfy_extras/nodes_webcam.py                                  | 2 +-
 ruff.toml                                                     | 1 +
 tests-unit/comfy_test/folder_path_test.py                     | 2 +-
 tests-unit/folder_paths_test/filter_by_content_types_test.py  | 2 +-
 10 files changed, 10 insertions(+), 9 deletions(-)

diff --git a/api_server/services/file_service.py b/api_server/services/file_service.py
index 39457108..115edccd 100644
--- a/api_server/services/file_service.py
+++ b/api_server/services/file_service.py
@@ -10,4 +10,4 @@ class FileService:
         if directory_key not in self.allowed_directories:
             raise ValueError("Invalid directory key")
         directory_path: str = self.allowed_directories[directory_key]
-        return self.file_system_ops.walk_directory(directory_path)
\ No newline at end of file
+        return self.file_system_ops.walk_directory(directory_path)
diff --git a/api_server/utils/file_operations.py b/api_server/utils/file_operations.py
index ef1bf999..32d6e047 100644
--- a/api_server/utils/file_operations.py
+++ b/api_server/utils/file_operations.py
@@ -39,4 +39,4 @@ class FileSystemOperations:
                         "path": relative_path,
                         "type": "directory"
                     })
-        return file_list
\ No newline at end of file
+        return file_list
diff --git a/app/app_settings.py b/app/app_settings.py
index 8c6edc56..efe87adb 100644
--- a/app/app_settings.py
+++ b/app/app_settings.py
@@ -51,4 +51,4 @@ class AppSettings():
         settings = self.get_settings(request)
         settings[setting_id] = await request.json()
         self.save_settings(request, settings)
-        return web.Response(status=200)
\ No newline at end of file
+        return web.Response(status=200)
diff --git a/comfy/ldm/util.py b/comfy/ldm/util.py
index 2ed4aa2a..30b4b472 100644
--- a/comfy/ldm/util.py
+++ b/comfy/ldm/util.py
@@ -194,4 +194,4 @@ class AdamWwithEMAandWings(optim.Optimizer):
             for param, ema_param in zip(params_with_grad, ema_params_with_grad):
                 ema_param.mul_(cur_ema_decay).add_(param.float(), alpha=1 - cur_ema_decay)
 
-        return loss
\ No newline at end of file
+        return loss
diff --git a/comfy_extras/nodes_load_3d.py b/comfy_extras/nodes_load_3d.py
index b72a357b..d10c4e12 100644
--- a/comfy_extras/nodes_load_3d.py
+++ b/comfy_extras/nodes_load_3d.py
@@ -121,4 +121,4 @@ NODE_DISPLAY_NAME_MAPPINGS = {
     "Load3D": "Load 3D",
     "Load3DAnimation": "Load 3D - Animation",
     "Preview3D": "Preview 3D"
-}
\ No newline at end of file
+}
diff --git a/comfy_extras/nodes_morphology.py b/comfy_extras/nodes_morphology.py
index 071521d8..b1372b8c 100644
--- a/comfy_extras/nodes_morphology.py
+++ b/comfy_extras/nodes_morphology.py
@@ -46,4 +46,4 @@ NODE_CLASS_MAPPINGS = {
 
 NODE_DISPLAY_NAME_MAPPINGS = {
     "Morphology": "ImageMorphology",
-}
\ No newline at end of file
+}
diff --git a/comfy_extras/nodes_webcam.py b/comfy_extras/nodes_webcam.py
index 32a0ba2f..31eddb2d 100644
--- a/comfy_extras/nodes_webcam.py
+++ b/comfy_extras/nodes_webcam.py
@@ -30,4 +30,4 @@ NODE_CLASS_MAPPINGS = {
 
 NODE_DISPLAY_NAME_MAPPINGS = {
     "WebcamCapture": "Webcam Capture",
-}
\ No newline at end of file
+}
diff --git a/ruff.toml b/ruff.toml
index 30488e86..6f9ac932 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -5,6 +5,7 @@ lint.ignore = ["ALL"]
 lint.select = [
   "S307", # suspicious-eval-usage
   "T201", # print-usage
+  "W292",
   "W293",
   # The "F" series in Ruff stands for "Pyflakes" rules, which catch various Python syntax errors and undefined names.
   # See all rules here: https://docs.astral.sh/ruff/rules/#pyflakes-f
diff --git a/tests-unit/comfy_test/folder_path_test.py b/tests-unit/comfy_test/folder_path_test.py
index 55613505..f8173cdc 100644
--- a/tests-unit/comfy_test/folder_path_test.py
+++ b/tests-unit/comfy_test/folder_path_test.py
@@ -95,4 +95,4 @@ def test_get_save_image_path(temp_dir):
     assert filename == "test"
     assert counter == 1
     assert subfolder == ""
-    assert filename_prefix == "test"
\ No newline at end of file
+    assert filename_prefix == "test"
diff --git a/tests-unit/folder_paths_test/filter_by_content_types_test.py b/tests-unit/folder_paths_test/filter_by_content_types_test.py
index 5574789e..6b334313 100644
--- a/tests-unit/folder_paths_test/filter_by_content_types_test.py
+++ b/tests-unit/folder_paths_test/filter_by_content_types_test.py
@@ -49,4 +49,4 @@ def test_handles_no_extension():
 
 def test_handles_no_files():
     files = []
-    assert filter_files_content_types(files, ["image", "audio", "video"]) == []
\ No newline at end of file
+    assert filter_files_content_types(files, ["image", "audio", "video"]) == []
From a90aafafc119b1bc5dc18b86dd419546d5643fb5 Mon Sep 17 00:00:00 2001
From: blepping <157360029+blepping@users.noreply.github.com>
Date: Mon, 30 Dec 2024 03:09:38 -0700
Subject: [PATCH 4/4] Add kl_optimal scheduler (#6206)

* Add kl_optimal scheduler

* Rename kl_optimal_schedule to kl_optimal_scheduler to be more consistent
---
 comfy/samplers.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/comfy/samplers.py b/comfy/samplers.py
index 6a386511..cd25bc35 100644
--- a/comfy/samplers.py
+++ b/comfy/samplers.py
@@ -467,6 +467,13 @@ def linear_quadratic_schedule(model_sampling, steps, threshold_noise=0.025, line
     sigma_schedule = [1.0 - x for x in sigma_schedule]
     return torch.FloatTensor(sigma_schedule) * model_sampling.sigma_max.cpu()
 
+# Referenced from https://github.com/AUTOMATIC1111/stable-diffusion-webui/pull/15608
+def kl_optimal_scheduler(n: int, sigma_min: float, sigma_max: float) -> torch.Tensor:
+    adj_idxs = torch.arange(n, dtype=torch.float).div_(n - 1)
+    sigmas = adj_idxs.new_zeros(n + 1)
+    sigmas[:-1] = (adj_idxs * math.atan(sigma_min) + (1 - adj_idxs) * math.atan(sigma_max)).tan_()
+    return sigmas
+
 def get_mask_aabb(masks):
     if masks.numel() == 0:
         return torch.zeros((0, 4), device=masks.device, dtype=torch.int)
"simple", "ddim_uniform", "beta", "linear_quadratic", "kl_optimal"] SAMPLER_NAMES = KSAMPLER_NAMES + ["ddim", "uni_pc", "uni_pc_bh2"] def calculate_sigmas(model_sampling, scheduler_name, steps): @@ -933,6 +940,8 @@ def calculate_sigmas(model_sampling, scheduler_name, steps): sigmas = beta_scheduler(model_sampling, steps) elif scheduler_name == "linear_quadratic": sigmas = linear_quadratic_schedule(model_sampling, steps) + elif scheduler_name == "kl_optimal": + sigmas = kl_optimal_scheduler(n=steps, sigma_min=float(model_sampling.sigma_min), sigma_max=float(model_sampling.sigma_max)) else: logging.error("error invalid scheduler {}".format(scheduler_name)) return sigmas