
Allow model or clip to be None in load_lora_for_models.

commit 2455aaed8a by comfyanonymous, 1 year ago (pull/1890/head)
1 changed file: comfy/sd.py (25 changed lines)
@@ -55,13 +55,26 @@ def load_clip_weights(model, sd):
 
 
 def load_lora_for_models(model, clip, lora, strength_model, strength_clip):
-    key_map = comfy.lora.model_lora_keys_unet(model.model)
-    key_map = comfy.lora.model_lora_keys_clip(clip.cond_stage_model, key_map)
+    key_map = {}
+    if model is not None:
+        key_map = comfy.lora.model_lora_keys_unet(model.model, key_map)
+    if clip is not None:
+        key_map = comfy.lora.model_lora_keys_clip(clip.cond_stage_model, key_map)
+
     loaded = comfy.lora.load_lora(lora, key_map)
-    new_modelpatcher = model.clone()
-    k = new_modelpatcher.add_patches(loaded, strength_model)
-    new_clip = clip.clone()
-    k1 = new_clip.add_patches(loaded, strength_clip)
+    if model is not None:
+        new_modelpatcher = model.clone()
+        k = new_modelpatcher.add_patches(loaded, strength_model)
+    else:
+        k = ()
+        new_modelpatcher = None
+
+    if clip is not None:
+        new_clip = clip.clone()
+        k1 = new_clip.add_patches(loaded, strength_clip)
+    else:
+        k1 = ()
+        new_clip = None
     k = set(k)
     k1 = set(k1)
     for x in loaded:
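
For context, a minimal sketch of how the relaxed signature can be called after this change. The checkpoint and LoRA paths, the surrounding loader calls (comfy.sd.load_checkpoint_guess_config, comfy.utils.load_torch_file), and the assumption that the function still returns the patched (model, clip) pair are illustrative only and not part of this commit:

# Sketch only: paths and loader calls are assumptions, not part of this commit.
import comfy.sd
import comfy.utils

model, clip, vae, _ = comfy.sd.load_checkpoint_guess_config("checkpoints/example.safetensors")
lora_sd = comfy.utils.load_torch_file("loras/example_lora.safetensors", safe_load=True)

# clip=None is now accepted: only the UNet is patched, the returned clip is None.
model_only, _ = comfy.sd.load_lora_for_models(model, None, lora_sd, 1.0, 0.0)

# model=None is likewise accepted: only the CLIP text encoder is patched.
_, clip_only = comfy.sd.load_lora_for_models(None, clip, lora_sd, 0.0, 1.0)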
