Compare commits

...

6 Commits

Author          SHA1        Message                                                          Date
Dr.Lt.Data      c758f2a647  Merge 1670612b90 into 2307ff6746                                 2025-01-08 19:46:48 -05:00
comfyanonymous  2307ff6746  Improve some logging messages.                                   2025-01-08 19:05:22 -05:00
Dr.Lt.Data      1670612b90  resolve conflict                                                 2024-09-24 21:25:16 +09:00
Dr.Lt.Data      565d67478a  Merge branch 'master' into improve/extra_model_paths_template   2024-09-24 21:22:31 +09:00
Dr.Lt.Data      128075b781  Update extra_model_paths.yaml.example                            2024-08-22 13:41:38 +09:00
Dr.Lt.Data      b86567092b  add missing folder paths in extra_model_paths template           2024-08-15 10:03:50 +09:00
3 changed files with 9 additions and 3 deletions


@@ -111,7 +111,7 @@ class CLIP:
         model_management.load_models_gpu([self.patcher], force_full_load=True)
         self.layer_idx = None
         self.use_clip_schedule = False
-        logging.info("CLIP model load device: {}, offload device: {}, current: {}, dtype: {}".format(load_device, offload_device, params['device'], dtype))
+        logging.info("CLIP/text encoder model load device: {}, offload device: {}, current: {}, dtype: {}".format(load_device, offload_device, params['device'], dtype))

     def clone(self):
         n = CLIP(no_init=True)
@@ -898,7 +898,7 @@ def load_state_dict_guess_config(sd, output_vae=True, output_clip=True, output_c
     if output_model:
         model_patcher = comfy.model_patcher.ModelPatcher(model, load_device=load_device, offload_device=model_management.unet_offload_device())
         if inital_load_device != torch.device("cpu"):
-            logging.info("loaded straight to GPU")
+            logging.info("loaded diffusion model directly to GPU")
             model_management.load_models_gpu([model_patcher], force_full_load=True)

     return (model_patcher, clip, vae, clipvision)


@@ -36,9 +36,14 @@ a111:
 #     models/diffusion_models
 #     models/unet
 #     embeddings: models/embeddings/
+#     gligen: models/gligen/
+#     hypernetworks: models/hypernetworks/
 #     loras: models/loras/
+#     photomaker: models/photomaker/
+#     style_models: models/style_models/
 #     upscale_models: models/upscale_models/
 #     vae: models/vae/
+#     vae_approx: models/vae_approx/

 #other_ui:
 #     base_path: path/to/ui
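For context, these template entries are commented out; the example file is normally copied or renamed to extra_model_paths.yaml and the needed lines uncommented. A minimal sketch of a custom section using the newly listed folders (the section name and base_path below are hypothetical examples, not part of this change):

    my_other_install:                 # hypothetical section name
        base_path: /path/to/ComfyUI/  # hypothetical install location
        gligen: models/gligen/
        hypernetworks: models/hypernetworks/
        photomaker: models/photomaker/
        style_models: models/style_models/
        vae_approx: models/vae_approx/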


@@ -4,7 +4,8 @@ lint.ignore = ["ALL"]
 # Enable specific rules
 lint.select = [
   "S307", # suspicious-eval-usage
-  "T201", # print-usage
+  "S102", # exec
+  "T", # print-usage
   "W",
   # The "F" series in Ruff stands for "Pyflakes" rules, which catch various Python syntax errors and undefined names.
   # See all rules here: https://docs.astral.sh/ruff/rules/#pyflakes-f