From 2cddbf0821c0e9b74993b2e2b546af2b0d57aa2c Mon Sep 17 00:00:00 2001
From: Chenlei Hu
Date: Thu, 12 Dec 2024 15:55:26 -0800
Subject: [PATCH] Lint and fix undefined names (1/N) (#6028)

---
 comfy/gligen.py                             |  1 +
 comfy/ldm/audio/dit.py                      |  8 ++++----
 comfy/ldm/models/autoencoder.py             | 12 +++++++-----
 comfy/ldm/modules/diffusionmodules/mmdit.py |  1 +
 ruff.toml                                   |  2 ++
 5 files changed, 15 insertions(+), 9 deletions(-)

diff --git a/comfy/gligen.py b/comfy/gligen.py
index 59252276..161d8a5e 100644
--- a/comfy/gligen.py
+++ b/comfy/gligen.py
@@ -1,3 +1,4 @@
+import math
 import torch
 from torch import nn
 from .ldm.modules.attention import CrossAttention
diff --git a/comfy/ldm/audio/dit.py b/comfy/ldm/audio/dit.py
index 3e2b4ebc..179c5b67 100644
--- a/comfy/ldm/audio/dit.py
+++ b/comfy/ldm/audio/dit.py
@@ -228,9 +228,9 @@ class FeedForward(nn.Module):
             linear_in = GLU(dim, inner_dim, activation, dtype=dtype, device=device, operations=operations)
         else:
             linear_in = nn.Sequential(
-                Rearrange('b n d -> b d n') if use_conv else nn.Identity(),
+                rearrange('b n d -> b d n') if use_conv else nn.Identity(),
                 operations.Linear(dim, inner_dim, bias = not no_bias, dtype=dtype, device=device) if not use_conv else operations.Conv1d(dim, inner_dim, conv_kernel_size, padding = (conv_kernel_size // 2), bias = not no_bias, dtype=dtype, device=device),
-                Rearrange('b n d -> b d n') if use_conv else nn.Identity(),
+                rearrange('b n d -> b d n') if use_conv else nn.Identity(),
                 activation
             )
 
@@ -245,9 +245,9 @@ class FeedForward(nn.Module):
 
         self.ff = nn.Sequential(
             linear_in,
-            Rearrange('b d n -> b n d') if use_conv else nn.Identity(),
+            rearrange('b d n -> b n d') if use_conv else nn.Identity(),
             linear_out,
-            Rearrange('b n d -> b d n') if use_conv else nn.Identity(),
+            rearrange('b n d -> b d n') if use_conv else nn.Identity(),
         )
 
     def forward(self, x):
diff --git a/comfy/ldm/models/autoencoder.py b/comfy/ldm/models/autoencoder.py
index 3b5bc0e6..6b6f0f27 100644
--- a/comfy/ldm/models/autoencoder.py
+++ b/comfy/ldm/models/autoencoder.py
@@ -1,3 +1,5 @@
+import logging
+import math
 import torch
 from contextlib import contextmanager
 from typing import Any, Dict, Tuple, Union
@@ -52,7 +54,7 @@ class AbstractAutoencoder(torch.nn.Module):
 
         if self.use_ema:
             self.model_ema = LitEma(self, decay=ema_decay)
-            logpy.info(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.")
+            logging.info(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.")
 
     def get_input(self, batch) -> Any:
         raise NotImplementedError()
@@ -68,14 +70,14 @@ class AbstractAutoencoder(torch.nn.Module):
             self.model_ema.store(self.parameters())
             self.model_ema.copy_to(self)
             if context is not None:
-                logpy.info(f"{context}: Switched to EMA weights")
+                logging.info(f"{context}: Switched to EMA weights")
         try:
             yield None
         finally:
             if self.use_ema:
                 self.model_ema.restore(self.parameters())
                 if context is not None:
-                    logpy.info(f"{context}: Restored training weights")
+                    logging.info(f"{context}: Restored training weights")
 
     def encode(self, *args, **kwargs) -> torch.Tensor:
         raise NotImplementedError("encode()-method of abstract base class called")
@@ -84,7 +86,7 @@ class AbstractAutoencoder(torch.nn.Module):
         raise NotImplementedError("decode()-method of abstract base class called")
 
     def instantiate_optimizer_from_config(self, params, lr, cfg):
-        logpy.info(f"loading >>> {cfg['target']} <<< optimizer from config")
+        logging.info(f"loading >>> {cfg['target']} <<< optimizer from config")
         return get_obj_from_str(cfg["target"])(
             params, lr=lr, **cfg.get("params", dict())
         )
@@ -112,7 +114,7 @@ class AutoencodingEngine(AbstractAutoencoder):
 
         self.encoder: torch.nn.Module = instantiate_from_config(encoder_config)
         self.decoder: torch.nn.Module = instantiate_from_config(decoder_config)
-        self.regularization: AbstractRegularizer = instantiate_from_config(
+        self.regularization = instantiate_from_config(
             regularizer_config
         )
 
diff --git a/comfy/ldm/modules/diffusionmodules/mmdit.py b/comfy/ldm/modules/diffusionmodules/mmdit.py
index 7365503f..b052c48e 100644
--- a/comfy/ldm/modules/diffusionmodules/mmdit.py
+++ b/comfy/ldm/modules/diffusionmodules/mmdit.py
@@ -1,3 +1,4 @@
+from functools import partial
 from typing import Dict, Optional, List
 
 import numpy as np
diff --git a/ruff.toml b/ruff.toml
index 39de4d40..41d3dc5b 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -6,4 +6,6 @@ lint.select = [
   "S307", # suspicious-eval-usage
   "F401", # unused-import
   "F841", # unused-local-variable
+  # TODO: Enable F821 after all errors has been fixed. Remaining errors: 7.
+  # "F821", # undefined-name
 ]
\ No newline at end of file
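
Illustrative aside, not part of the patch: the added imports above exist to satisfy ruff's F821 (undefined-name) rule, which the ruff.toml hunk leaves disabled for now. A minimal sketch of the class of error F821 catches and the fix pattern the patch applies (the scale_query helper and its 1/sqrt(d) scaling are hypothetical, not taken from comfy/gligen.py):

    import math

    import torch


    def scale_query(q: torch.Tensor, dim_head: int) -> torch.Tensor:
        # Without the module-level "import math" above, "math" is an
        # undefined name: ruff reports F821 on this line and the call
        # raises NameError at runtime.
        return q * (1.0 / math.sqrt(dim_head))

Running "ruff check --select F821" on a module that is missing such an import reproduces the kind of error the TODO comment in ruff.toml refers to.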