Fix clip_skip no longer being loaded from yaml file.

comfyanonymous 2023-03-06 11:34:02 -05:00
parent afff30fc0a
commit 501f19eec6


@@ -266,7 +266,7 @@ class CLIP:
         self.cond_stage_model = clip(**(params))
         self.tokenizer = tokenizer(embedding_directory=embedding_directory)
         self.patcher = ModelPatcher(self.cond_stage_model)
-        self.layer_idx = -1
+        self.layer_idx = None

     def clone(self):
         n = CLIP(no_init=True)
@@ -287,6 +287,7 @@ class CLIP:
         self.layer_idx = layer_idx

     def encode(self, text):
-        self.cond_stage_model.clip_layer(self.layer_idx)
+        if self.layer_idx is not None:
+            self.cond_stage_model.clip_layer(self.layer_idx)
         tokens = self.tokenizer.tokenize_with_weights(text)
         try:
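
For readers without the surrounding file handy, here is a minimal, self-contained sketch of the bug this commit fixes. The Fake* classes below are hypothetical stand-ins, not ComfyUI's real classes: with the old default of layer_idx = -1, encode() always pushed that default into the conditioning model, clobbering a clip_skip value that had been loaded from the yaml config; with the new None default plus the guard, an unset value is left alone.

# Hypothetical stand-in classes illustrating the behavior change, not ComfyUI's API.

class FakeCondStageModel:
    def __init__(self):
        self.layer_idx = -2          # pretend this came from the yaml's clip_skip

    def clip_layer(self, layer_idx):
        self.layer_idx = layer_idx   # select which CLIP layer output to use


class FakeCLIP:
    def __init__(self, layer_idx_default):
        self.cond_stage_model = FakeCondStageModel()
        self.layer_idx = layer_idx_default

    def encode(self, text):
        # Only push layer_idx down when it was explicitly set (the new guard);
        # combined with a default of -1, this still overwrites, which models
        # the old behavior below.
        if self.layer_idx is not None:
            self.cond_stage_model.clip_layer(self.layer_idx)
        return text  # tokenization and encoding omitted in this sketch


old_style = FakeCLIP(layer_idx_default=-1)    # pre-commit default
old_style.encode("a photo")
print(old_style.cond_stage_model.layer_idx)   # -1: yaml clip_skip (-2) overwritten

new_style = FakeCLIP(layer_idx_default=None)  # post-commit default
new_style.encode("a photo")
print(new_style.cond_stage_model.layer_idx)   # -2: yaml clip_skip preserved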