diff --git a/comfy/sd.py b/comfy/sd.py
index 87e1f0b8..300ceaef 100644
--- a/comfy/sd.py
+++ b/comfy/sd.py
@@ -252,18 +252,15 @@ class CLIP:
         else:
             params = {}
 
-        tokenizer_params = {}
-
         if self.target_clip == "ldm.modules.encoders.modules.FrozenOpenCLIPEmbedder":
             clip = sd2_clip.SD2ClipModel
             tokenizer = sd2_clip.SD2Tokenizer
         elif self.target_clip == "ldm.modules.encoders.modules.FrozenCLIPEmbedder":
             clip = sd1_clip.SD1ClipModel
             tokenizer = sd1_clip.SD1Tokenizer
-            tokenizer_params['embedding_directory'] = embedding_directory
 
         self.cond_stage_model = clip(**(params))
-        self.tokenizer = tokenizer(**(tokenizer_params))
+        self.tokenizer = tokenizer(embedding_directory=embedding_directory)
         self.patcher = ModelPatcher(self.cond_stage_model)
 
     def clone(self):
diff --git a/comfy/sd2_clip.py b/comfy/sd2_clip.py
index 8b90eeb8..fda793eb 100644
--- a/comfy/sd2_clip.py
+++ b/comfy/sd2_clip.py
@@ -30,7 +30,5 @@ class SD2ClipModel(sd1_clip.SD1ClipModel):
         self.layer_idx = layer_idx
 
 class SD2Tokenizer(sd1_clip.SD1Tokenizer):
-    def __init__(self, tokenizer_path=None):
-        super().__init__(tokenizer_path, pad_with_end=False)
-
-
+    def __init__(self, tokenizer_path=None, embedding_directory=None):
+        super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory)