split tokenizer from encoder

BlenderNeko 2023-04-13 22:06:50 +02:00
parent 8489cba140
commit 73175cf58c
2 changed files with 6 additions and 3 deletions


@@ -372,10 +372,12 @@ class CLIP:
     def clip_layer(self, layer_idx):
         self.layer_idx = layer_idx
 
-    def encode(self, text):
+    def tokenize(self, text):
+        return self.tokenizer.tokenize_with_weights(text)
+
+    def encode(self, tokens):
         if self.layer_idx is not None:
             self.cond_stage_model.clip_layer(self.layer_idx)
-        tokens = self.tokenizer.tokenize_with_weights(text)
         try:
             self.patcher.patch_model()
             cond = self.cond_stage_model.encode_token_weights(tokens)
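
With this change the two stages are exposed separately on CLIP: tokenize() runs only the tokenizer and returns weighted tokens, while encode() runs only the text model on tokens it is given. A minimal sketch of calling the split API, assuming a loaded CLIP instance as built by ComfyUI's model loading code (the helper name encode_prompt is illustrative, not part of this commit):

# Sketch only: `clip` is assumed to be a loaded CLIP object exposing the
# tokenize()/encode() pair introduced in this commit.
def encode_prompt(clip, text):
    # Step 1: tokenizer only, producing weighted tokens
    tokens = clip.tokenize(text)
    # (tokens could be inspected or edited here before encoding)
    # Step 2: text encoder only, turning the tokens into conditioning
    return clip.encode(tokens)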


@@ -44,7 +44,8 @@ class CLIPTextEncode:
     CATEGORY = "conditioning"
 
     def encode(self, clip, text):
-        return ([[clip.encode(text), {}]], )
+        tokens = clip.tokenize(text)
+        return ([[clip.encode(tokens), {}]], )
 
 class ConditioningCombine:
     @classmethod
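
On the node side, CLIPTextEncode now tokenizes first and passes the tokens to encode(). A hypothetical custom node (class name and any token handling beyond the two calls are assumptions, not part of this commit) could use the same split to sit between tokenization and encoding:

# Hypothetical custom node sketch built on the split API; only the
# tokenize()/encode() calls come from this commit, the rest is illustrative.
class CLIPTextEncodeTokens:
    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {"clip": ("CLIP",),
                             "text": ("STRING", {"multiline": True})}}

    RETURN_TYPES = ("CONDITIONING",)
    FUNCTION = "encode"
    CATEGORY = "conditioning"

    def encode(self, clip, text):
        tokens = clip.tokenize(text)   # weighted tokens from the tokenizer
        # tokens could be logged or adjusted here before encoding
        return ([[clip.encode(tokens), {}]], )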