ComfyUI/comfy/text_encoders/spiece_tokenizer.py

import os


class SPieceTokenizer:
    @staticmethod
    def from_pretrained(path):
        return SPieceTokenizer(path)

    def __init__(self, tokenizer_path):
        import sentencepiece
        # Load the SentencePiece model from disk and remember the EOS token id.
        self.tokenizer = sentencepiece.SentencePieceProcessor(model_file=tokenizer_path)
        self.end = self.tokenizer.eos_id()

    def get_vocab(self):
        # Map every piece (token string) to its integer id.
        out = {}
        for i in range(self.tokenizer.get_piece_size()):
            out[self.tokenizer.id_to_piece(i)] = i
        return out

    def __call__(self, string):
        # Encode the string to token ids, append the EOS id, and return a dict
        # shaped like a Hugging Face tokenizer output.
        out = self.tokenizer.encode(string)
        out += [self.end]
        return {"input_ids": out}