split tokenizer from encoder

pull/503/head
BlenderNeko, 2 years ago
commit 73175cf58c
Changed files:
  1. comfy/sd.py (6 changes)
  2. nodes.py (3 changes)

comfy/sd.py (6 changes)

@@ -372,10 +372,12 @@ class CLIP:
     def clip_layer(self, layer_idx):
         self.layer_idx = layer_idx
 
-    def encode(self, text):
+    def tokenize(self, text):
+        return self.tokenizer.tokenize_with_weights(text)
+
+    def encode(self, tokens):
         if self.layer_idx is not None:
             self.cond_stage_model.clip_layer(self.layer_idx)
-        tokens = self.tokenizer.tokenize_with_weights(text)
         try:
             self.patcher.patch_model()
             cond = self.cond_stage_model.encode_token_weights(tokens)
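
With this change, CLIP exposes a two-step API: callers first tokenize the prompt, then pass the resulting tokens to encode(). A minimal sketch of the new call pattern, assuming clip is a loaded comfy.sd.CLIP instance obtained elsewhere and an arbitrary prompt string; the nodes.py hunk below applies exactly this pattern in CLIPTextEncode:

# Sketch only: clip is assumed to be an existing comfy.sd.CLIP instance.
tokens = clip.tokenize("a photo of a cat")  # wraps tokenizer.tokenize_with_weights
cond = clip.encode(tokens)                  # patches the model and runs encode_token_weights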

nodes.py (3 changes)

@@ -44,7 +44,8 @@ class CLIPTextEncode:
     CATEGORY = "conditioning"
 
     def encode(self, clip, text):
-        return ([[clip.encode(text), {}]], )
+        tokens = clip.tokenize(text)
+        return ([[clip.encode(tokens), {}]], )
 
 class ConditioningCombine:
     @classmethod
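
Splitting the two steps means node code can now work with the token data directly between tokenization and encoding. As an illustrative sketch only, not part of this commit: a hypothetical node that tokenizes first and leaves room to adjust the tokens before calling encode(). Its class layout is assumed (only CATEGORY and encode appear in the hunk above); only clip.tokenize() and clip.encode(tokens) come from the diff.

# Hypothetical node built on the split API; the class name and the idea of
# editing tokens before encoding are illustrative.
class CLIPTextEncodeTokens:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"text": ("STRING", {"multiline": True}), "clip": ("CLIP", )}}
    RETURN_TYPES = ("CONDITIONING",)
    FUNCTION = "encode"

    CATEGORY = "conditioning"

    def encode(self, clip, text):
        tokens = clip.tokenize(text)
        # tokens (whatever structure tokenize_with_weights returns) could be
        # inspected or rewritten here before encoding
        return ([[clip.encode(tokens), {}]], )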
