Lint unused import (#5973)

* Lint unused import

* nit

* Remove unused imports

* revert fix_torch import

* nit
Author: Chenlei Hu, 2024-12-09 14:24:39 -06:00 (committed by GitHub)
Parent: e2fafe0686
Commit: 0fd4e6c778
40 changed files with 39 additions and 75 deletions

View File

@@ -1,3 +1,3 @@
[MESSAGES CONTROL]
disable=all
enable=eval-used
enable=eval-used, unused-import
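For reference, here is a minimal sketch (a hypothetical file, not part of the change) of what the newly enabled unused-import check, pylint message W0611, reports under this configuration:

# example.py -- hypothetical file used only to illustrate the check
import os       # flagged: W0611 (unused-import), "os" is never referenced below
import math

print(math.pi)  # "math" is used, so it is not flagged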

View File

@@ -2,11 +2,9 @@
#and modified
import torch
import torch as th
import torch.nn as nn
from ..ldm.modules.diffusionmodules.util import (
    zero_module,
    timestep_embedding,
)

View File

@@ -1,10 +1,8 @@
import math
from typing import List, Optional, Tuple
import numpy as np
import torch
import torch.nn as nn
from einops import rearrange
from torch import Tensor
from comfy.ldm.modules.diffusionmodules.mmdit import DismantledBlock, PatchEmbed, VectorEmbedder, TimestepEmbedder, get_2d_sincos_pos_embed_torch

View File

@@ -1,5 +1,5 @@
import torch
from typing import Dict, Optional
from typing import Optional
import comfy.ldm.modules.diffusionmodules.mmdit
class ControlNet(comfy.ldm.modules.diffusionmodules.mmdit.MMDiT):

View File

@@ -1,10 +1,9 @@
#code taken from: https://github.com/wl-zhao/UniPC and modified
import torch
import torch.nn.functional as F
import math
from tqdm.auto import trange, tqdm
from tqdm.auto import trange
class NoiseScheduleVP:

View File

@@ -2,7 +2,7 @@
import torch
from torch import nn
from typing import Literal, Dict, Any
from typing import Literal
import math
import comfy.ops
ops = comfy.ops.disable_weight_init

View File

@@ -2,8 +2,8 @@
import torch
import torch.nn as nn
from torch import Tensor, einsum
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, TypeVar, Union
from torch import Tensor
from typing import List, Union
from einops import rearrange
import math
import comfy.ops

View File

@@ -16,7 +16,6 @@
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
import torch
import torchvision
from torch import nn
from .common import LayerNorm2d_op

View File

@@ -6,9 +6,7 @@ import math
from torch import Tensor, nn
from einops import rearrange, repeat
from .layers import (DoubleStreamBlock, EmbedND, LastLayer,
                     MLPEmbedder, SingleStreamBlock,
                     timestep_embedding)
from .layers import (timestep_embedding)
from .model import Flux
import comfy.ldm.common_dit

View File

@@ -1,7 +1,7 @@
#original code from https://github.com/genmoai/models under apache 2.0 license
#adapted to ComfyUI
from typing import Optional, Tuple
from typing import Optional
import torch
import torch.nn as nn

View File

@@ -1,7 +1,7 @@
#original code from https://github.com/genmoai/models under apache 2.0 license
#adapted to ComfyUI
from typing import Callable, List, Optional, Tuple, Union
from typing import List, Optional, Tuple, Union
from functools import partial
import math

View File

@@ -1,24 +1,17 @@
from typing import Any, Optional
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils import checkpoint
from comfy.ldm.modules.diffusionmodules.mmdit import (
    Mlp,
    TimestepEmbedder,
    PatchEmbed,
    RMSNorm,
)
from comfy.ldm.modules.diffusionmodules.util import timestep_embedding
from .poolers import AttentionPool
import comfy.latent_formats
from .models import HunYuanDiTBlock, calc_rope
from .posemb_layers import get_2d_rotary_pos_embed, get_fill_resize_and_crop
class HunYuanControlNet(nn.Module):

View File

@@ -1,8 +1,6 @@
from typing import Any
import torch
import torch.nn as nn
import torch.nn.functional as F
import comfy.ops
from comfy.ldm.modules.diffusionmodules.mmdit import Mlp, TimestepEmbedder, PatchEmbed, RMSNorm

View File

@@ -1,6 +1,5 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from comfy.ldm.modules.attention import optimized_attention
import comfy.ops

View File

@@ -3,7 +3,7 @@ from torch import nn
from functools import partial
import math
from einops import rearrange
from typing import Any, Mapping, Optional, Tuple, Union, List
from typing import Optional, Tuple, Union
from .conv_nd_factory import make_conv_nd, make_linear_nd
from .pixel_norm import PixelNorm

View File

@@ -1,6 +1,5 @@
from typing import Tuple, Union
import torch
from .dual_conv3d import DualConv3d
from .causal_conv3d import CausalConv3d

View File

@@ -1,6 +1,6 @@
import torch
from contextlib import contextmanager
from typing import Any, Dict, List, Optional, Tuple, Union
from typing import Any, Dict, Tuple, Union
from comfy.ldm.modules.distributions.distributions import DiagonalGaussianDistribution

View File

@@ -1,5 +1,3 @@
import logging
import math
from typing import Dict, Optional, List
import numpy as np

View File

@@ -3,7 +3,6 @@ import math
import torch
import torch.nn as nn
import numpy as np
from typing import Optional, Any
import logging
from comfy import model_management

View File

@@ -9,7 +9,6 @@ import logging
from .util import (
    checkpoint,
    avg_pool_nd,
    zero_module,
    timestep_embedding,
    AlphaBlender,
)

View File

@@ -4,7 +4,6 @@ import numpy as np
from functools import partial
from .util import extract_into_tensor, make_beta_schedule
from comfy.ldm.util import default
class AbstractLowScaleModel(nn.Module):

View File

@@ -8,7 +8,6 @@
# thanks!
import os
import math
import torch
import torch.nn as nn

View File

@@ -1,5 +1,5 @@
import functools
from typing import Callable, Iterable, Union
from typing import Iterable, Union
import torch
from einops import rearrange, repeat

View File

@@ -1,6 +1,5 @@
from __future__ import annotations
import uuid
import torch
import comfy.model_management
import comfy.conds
import comfy.utils

View File

@@ -1,4 +1,3 @@
import os
import torch
class SPieceTokenizer:

View File

@@ -2,8 +2,7 @@ import comfy.samplers
import comfy.utils
import torch
import numpy as np
from tqdm.auto import trange, tqdm
import math
from tqdm.auto import trange
@torch.no_grad()

View File

@@ -1,4 +1,3 @@
import torch
from nodes import MAX_RESOLUTION
class CLIPTextEncodeSDXLRefiner:

View File

@@ -1,4 +1,3 @@
import numpy as np
import torch
import comfy.utils
from enum import Enum

View File

@@ -4,7 +4,6 @@ import torch
from collections.abc import Iterable
if TYPE_CHECKING:
    from comfy.model_patcher import ModelPatcher
    from comfy.sd import CLIP
import comfy.hooks

View File

@@ -1,4 +1,3 @@
import folder_paths
import comfy.sd
import comfy.model_sampling
import comfy.latent_formats

View File

@@ -1,4 +1,3 @@
import torch
import comfy.utils
class PatchModelAddDownscale:

View File

@@ -1,4 +1,3 @@
import os
import logging
from spandrel import ModelLoader, ImageModelDescriptor
from comfy import model_management

View File

@@ -1,7 +1,5 @@
from PIL import Image, ImageOps
from io import BytesIO
from PIL import Image
import numpy as np
import struct
import comfy.utils
import time

View File

@@ -17,7 +17,6 @@ from comfy_execution.graph import get_input_info, ExecutionList, DynamicPrompt,
from comfy_execution.graph_utils import is_link, GraphBuilder
from comfy_execution.caching import HierarchicalCache, LRUCache, CacheKeySetInputSignature, CacheKeySetID
from comfy_execution.validation import validate_node_input
from comfy.cli_args import args
class ExecutionResult(Enum):
    SUCCESS = 0

View File

@@ -5,20 +5,24 @@ import ctypes
import logging
torch_spec = importlib.util.find_spec("torch")
for folder in torch_spec.submodule_search_locations:
    lib_folder = os.path.join(folder, "lib")
    test_file = os.path.join(lib_folder, "fbgemm.dll")
    dest = os.path.join(lib_folder, "libomp140.x86_64.dll")
    if os.path.exists(dest):
        break
    with open(test_file, 'rb') as f:
        contents = f.read()
        if b"libomp140.x86_64.dll" not in contents:
            break
    try:
        mydll = ctypes.cdll.LoadLibrary(test_file)
    except FileNotFoundError as e:
        logging.warning("Detected pytorch version with libomp issue, patching.")
        shutil.copyfile(os.path.join(lib_folder, "libiomp5md.dll"), dest)

def fix_pytorch_libomp():
    """
    Fix PyTorch libomp DLL issue on Windows by copying the correct DLL file if needed.
    """
    torch_spec = importlib.util.find_spec("torch")
    for folder in torch_spec.submodule_search_locations:
        lib_folder = os.path.join(folder, "lib")
        test_file = os.path.join(lib_folder, "fbgemm.dll")
        dest = os.path.join(lib_folder, "libomp140.x86_64.dll")
        if os.path.exists(dest):
            break
        with open(test_file, "rb") as f:
            contents = f.read()
            if b"libomp140.x86_64.dll" not in contents:
                break
        try:
            mydll = ctypes.cdll.LoadLibrary(test_file)
        except FileNotFoundError as e:
            logging.warning("Detected pytorch version with libomp issue, patching.")
            shutil.copyfile(os.path.join(lib_folder, "libiomp5md.dll"), dest)
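Wrapping the module-level patch in fix_pytorch_libomp() is what lets main.py stop relying on a side-effect import: a bare "import fix_torch" is never referenced afterwards, so the newly enabled unused-import check would presumably flag it, whereas importing and calling the function keeps the name in use. A minimal sketch of the resulting call pattern (the platform check here is a simplified stand-in for the real args.windows_standalone_build guard):

import platform

# Only attempt the patch on Windows; importing fix_torch no longer runs anything by itself.
if platform.system() == "Windows":
    try:
        from fix_torch import fix_pytorch_libomp
        fix_pytorch_libomp()
    except Exception:
        # The fix is best-effort; main.py likewise swallows failures and continues.
        pass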

View File

@@ -4,7 +4,7 @@ import os
import time
import mimetypes
import logging
from typing import Set, List, Dict, Tuple, Literal
from typing import Literal
from collections.abc import Collection
supported_pt_extensions: set[str] = {'.ckpt', '.pt', '.bin', '.pth', '.safetensors', '.pkl', '.sft'}
@@ -133,7 +133,7 @@ def get_directory_by_type(type_name: str) -> str | None:
        return get_input_directory()
    return None
def filter_files_content_types(files: List[str], content_types: Literal["image", "video", "audio"]) -> List[str]:
def filter_files_content_types(files: list[str], content_types: Literal["image", "video", "audio"]) -> list[str]:
"""
Example:
files = os.listdir(folder_paths.get_input_directory())
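Most of the removed typing names were already unused; the one remaining use, List[str], is rewritten as the built-in generic list[str] (PEP 585, Python 3.9+) so that alias can be dropped as well. A small sketch of the same annotation style, using a hypothetical helper rather than the real function:

# Hypothetical helper annotated with built-in generics instead of typing.List / typing.Dict.
def count_by_extension(files: list[str]) -> dict[str, int]:
    counts: dict[str, int] = {}
    for name in files:
        ext = name.rsplit(".", 1)[-1].lower() if "." in name else ""
        counts[ext] = counts.get(ext, 0) + 1
    return counts

print(count_by_extension(["a.png", "b.PNG", "notes.txt"]))  # {'png': 2, 'txt': 1}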

View File

@@ -1,7 +1,5 @@
import torch
from PIL import Image
import struct
import numpy as np
from comfy.cli_args import args, LatentPreviewMethod
from comfy.taesd.taesd import TAESD
import comfy.model_management

View File

@@ -87,7 +87,8 @@ if __name__ == "__main__":
    if args.windows_standalone_build:
        try:
            import fix_torch
            from fix_torch import fix_pytorch_libomp
            fix_pytorch_libomp()
        except:
            pass

View File

@@ -1,6 +1,5 @@
import json
from urllib import request, parse
import random
from urllib import request
#This is the ComfyUI api prompt format.

View File

@@ -1,6 +1,5 @@
from copy import deepcopy
from io import BytesIO
from urllib import request
import numpy
import os
from PIL import Image