Rename LLAMATokenizer to SPieceTokenizer.
This commit is contained in:
parent b2c995f623
commit 14764aa2e2
@@ -1,5 +1,5 @@
 from comfy import sd1_clip
-from .llama_tokenizer import LLAMATokenizer
+from .spiece_tokenizer import SPieceTokenizer
 import comfy.text_encoders.t5
 import os

@@ -11,7 +11,7 @@ class PT5XlModel(sd1_clip.SDClipModel):
 class PT5XlTokenizer(sd1_clip.SDTokenizer):
     def __init__(self, embedding_directory=None):
         tokenizer_path = os.path.join(os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_pile_tokenizer"), "tokenizer.model")
-        super().__init__(tokenizer_path, pad_with_end=False, embedding_size=2048, embedding_key='pile_t5xl', tokenizer_class=LLAMATokenizer, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=256, pad_token=1)
+        super().__init__(tokenizer_path, pad_with_end=False, embedding_size=2048, embedding_key='pile_t5xl', tokenizer_class=SPieceTokenizer, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=256, pad_token=1)

 class AuraT5Tokenizer(sd1_clip.SD1Tokenizer):
     def __init__(self, embedding_directory=None):
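The only functional change in this first file is the tokenizer_class keyword handed to sd1_clip.SDTokenizer. As a minimal sketch, assuming (this is not shown in the diff) that SDTokenizer builds its backend by calling tokenizer_class.from_pretrained(tokenizer_path), the toy stand-in below illustrates why the rename is a drop-in change; ToySDTokenizer is a hypothetical name used only for illustration:

# Hedged sketch: a toy stand-in for how SDTokenizer is assumed to consume
# the tokenizer_class argument. The real sd1_clip.SDTokenizer does much more;
# this only illustrates the contract the renamed class has to satisfy.
class ToySDTokenizer:
    def __init__(self, tokenizer_path, tokenizer_class, **kwargs):
        # Any class exposing a from_pretrained(path) factory works here,
        # so swapping LLAMATokenizer for SPieceTokenizer needs no other
        # changes in this file beyond the import and the keyword argument.
        self.tokenizer = tokenizer_class.from_pretrained(tokenizer_path)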
@@ -1,9 +1,9 @@
 import os

-class LLAMATokenizer:
+class SPieceTokenizer:
     @staticmethod
     def from_pretrained(path):
-        return LLAMATokenizer(path)
+        return SPieceTokenizer(path)

     def __init__(self, tokenizer_path):
         import sentencepiece
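The hunk is truncated at the sentencepiece import, so the rest of the wrapper is not visible here. For orientation, here is a minimal sketch, not the actual ComfyUI implementation, of how a SentencePiece-backed wrapper with this from_pretrained() factory typically continues; the class name SPieceTokenizerSketch and every method body below are assumptions made for illustration:

# Minimal sketch (assumptions, not the real spiece_tokenizer.py) of a
# SentencePiece wrapper exposing the from_pretrained() factory seen above.
class SPieceTokenizerSketch:
    @staticmethod
    def from_pretrained(path):
        # Factory matching the pattern in the diff: build the wrapper from
        # the path to a tokenizer.model file.
        return SPieceTokenizerSketch(path)

    def __init__(self, tokenizer_path):
        # Imported lazily, mirroring the diff above.
        import sentencepiece
        # Load the SentencePiece model from disk.
        self.tokenizer = sentencepiece.SentencePieceProcessor(model_file=tokenizer_path)

    def get_vocab(self):
        # Map each piece string to its integer id.
        return {self.tokenizer.id_to_piece(i): i
                for i in range(self.tokenizer.get_piece_size())}

    def __call__(self, text):
        # Encode text into token ids, returned in a transformers-style dict.
        return {"input_ids": self.tokenizer.encode(text)}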