{
    "models": [
        {
            "name": "ID-Animator/animator.ckpt",
            "type": "ID-Animator",
            "base": "SD1.5",
            "save_path": "custom_nodes/ComfyUI_ID_Animator/models",
            "description": "ID-Animator checkpoint",
            "reference": "https://huggingface.co/spaces/ID-Animator/ID-Animator",
            "filename": "animator.ckpt",
            "url": "https://huggingface.co/spaces/ID-Animator/ID-Animator/resolve/main/animator.ckpt"
        },
        {
            "name": "ID-Animator/mm_sd_v15_v2.ckpt",
            "type": "ID-Animator",
            "base": "SD1.5",
            "save_path": "custom_nodes/ComfyUI_ID_Animator/models/animatediff_models",
            "description": "AnimateDiff checkpoint for ID-Animator",
            "reference": "https://huggingface.co/spaces/ID-Animator/ID-Animator",
            "filename": "mm_sd_v15_v2.ckpt",
            "url": "https://huggingface.co/spaces/ID-Animator/ID-Animator/resolve/main/mm_sd_v15_v2.ckpt"
        },
        {
            "name": "ID-Animator/image_encoder",
            "type": "ID-Animator",
            "base": "SD1.5",
            "save_path": "custom_nodes/ComfyUI_ID_Animator/models/image_encoder",
            "description": "CLIP Image encoder for ID-Animator",
            "reference": "https://huggingface.co/spaces/ID-Animator/ID-Animator",
            "filename": "model.safetensors",
            "url": "https://huggingface.co/spaces/ID-Animator/ID-Animator/resolve/main/image_encoder/model.safetensors"
        },

        {
            "name": "IC-Light/fc",
            "type": "IC-Light",
            "base": "SD1.5",
            "save_path": "unet/IC-Light",
            "description": "The default relighting model, conditioned on text and foreground",
            "reference": "https://huggingface.co/lllyasviel/ic-light",
            "filename": "iclight_sd15_fc.safetensors",
            "url": "https://huggingface.co/lllyasviel/ic-light/resolve/main/iclight_sd15_fc.safetensors"
        },
        {
            "name": "IC-Light/fbc",
            "type": "IC-Light",
            "base": "SD1.5",
            "save_path": "unet/IC-Light",
            "description": "Relighting model conditioned with text, foreground, and background",
            "reference": "https://huggingface.co/lllyasviel/ic-light",
            "filename": "iclight_sd15_fbc.safetensors",
            "url": "https://huggingface.co/lllyasviel/ic-light/resolve/main/iclight_sd15_fbc.safetensors"
        },
        {
            "name": "IC-Light/fcon",
            "type": "IC-Light",
            "base": "SD1.5",
            "save_path": "unet/IC-Light",
            "description": "Same as iclight_sd15_fc.safetensors, but trained with offset noise",
            "reference": "https://huggingface.co/lllyasviel/ic-light",
            "filename": "iclight_sd15_fcon.safetensors",
            "url": "https://huggingface.co/lllyasviel/ic-light/resolve/main/iclight_sd15_fcon.safetensors"
        },

        {
            "name": "MonsterMMORPG/insightface (for InstantID)",
            "type": "insightface",
            "base": "SDXL",
            "save_path": "insightface/models",
            "description": "MonsterMMORPG insightface model for cubiq/InstantID",
            "reference": "https://huggingface.co/MonsterMMORPG/tools/tree/main",
            "filename": "antelopev2.zip",
            "url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/antelopev2.zip"
        },
        {
            "name": "InstantID/ip-adapter",
            "type": "instantid",
            "base": "SDXL",
            "save_path": "instantid/SDXL",
            "description": "ip-adapter model for cubiq/InstantID",
            "reference": "https://huggingface.co/InstantX/InstantID",
            "filename": "ip-adapter.bin",
            "url": "https://huggingface.co/InstantX/InstantID/resolve/main/ip-adapter.bin"
        },
        {
            "name": "InstantID/ControlNet",
            "type": "controlnet",
            "base": "SDXL",
            "save_path": "controlnet/SDXL/instantid",
            "description": "instantid controlnet model for cubiq/InstantID",
            "reference": "https://huggingface.co/InstantX/InstantID",
            "filename": "diffusion_pytorch_model.safetensors",
            "url": "https://huggingface.co/InstantX/InstantID/resolve/main/ControlNetModel/diffusion_pytorch_model.safetensors"
        },
        {
            "name": "ip_plus_composition_sd15.safetensors",
            "type": "IP-Adapter",
            "base": "SD1.5",
            "save_path": "ipadapter",
            "description": "You can use this model in the [a/ComfyUI IPAdapter plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) extension.",
            "reference": "https://huggingface.co/ostris/ip-composition-adapter",
            "filename": "ip_plus_composition_sd15.safetensors",
            "url": "https://huggingface.co/ostris/ip-composition-adapter/resolve/main/ip_plus_composition_sd15.safetensors"
        },
        {
            "name": "ip_plus_composition_sdxl.safetensors",
            "type": "IP-Adapter",
            "base": "SDXL",
            "save_path": "ipadapter",
            "description": "You can use this model in the [a/ComfyUI IPAdapter plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) extension.",
            "reference": "https://huggingface.co/ostris/ip-composition-adapter",
            "filename": "ip_plus_composition_sdxl.safetensors",
            "url": "https://huggingface.co/ostris/ip-composition-adapter/resolve/main/ip_plus_composition_sdxl.safetensors"
        },
        {
            "name": "ip-adapter-faceid-portrait-v11_sd15.bin",
            "type": "IP-Adapter",
            "base": "SD1.5",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID Portrait V11 Model (SD1.5) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-portrait-v11_sd15.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait-v11_sd15.bin"
        },
        {
            "name": "ip-adapter-faceid-portrait_sdxl.bin",
            "type": "IP-Adapter",
            "base": "SDXL",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID Portrait Model (SDXL) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-portrait_sdxl.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait_sdxl.bin"
        },
        {
            "name": "ip-adapter-faceid-portrait_sdxl_unnorm.bin",
            "type": "IP-Adapter",
            "base": "SDXL",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID Portrait Model (SDXL/unnorm) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-portrait_sdxl_unnorm.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait_sdxl_unnorm.bin"
        },
        {
            "name": "ip-adapter_sd15_light_v11.bin",
            "type": "IP-Adapter",
            "base": "SD1.5",
            "save_path": "ipadapter",
            "description": "You can use this model in the [a/ComfyUI IPAdapter plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) extension.",
            "reference": "https://huggingface.co/h94/IP-Adapter",
            "filename": "ip-adapter_sd15_light_v11.bin",
            "url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15_light_v11.bin"
        },

        {
            "name": "Kijai/SUPIR-v0F_fp16.safetensors (pruned)",
            "type": "checkpoints",
            "base": "SUPIR",
            "save_path": "checkpoints/SUPIR",
            "description": "SUPIR checkpoint model",
            "reference": "https://huggingface.co/Kijai/SUPIR_pruned/tree/main",
            "filename": "SUPIR-v0F_fp16.safetensors",
            "url": "https://huggingface.co/Kijai/SUPIR_pruned/resolve/main/SUPIR-v0F_fp16.safetensors"
        },
        {
            "name": "Kijai/SUPIR-v0Q_fp16.safetensors (pruned)",
            "type": "checkpoints",
            "base": "SUPIR",
            "save_path": "checkpoints/SUPIR",
            "description": "SUPIR checkpoint model",
            "reference": "https://huggingface.co/Kijai/SUPIR_pruned/tree/main",
            "filename": "SUPIR-v0Q_fp16.safetensors",
            "url": "https://huggingface.co/Kijai/SUPIR_pruned/resolve/main/SUPIR-v0Q_fp16.safetensors"
        },
        {
            "name": "SUPIR-v0F.ckpt",
            "type": "checkpoints",
            "base": "SUPIR",
            "save_path": "checkpoints/SUPIR",
            "description": "SUPIR checkpoint model",
            "reference": "https://huggingface.co/camenduru/SUPIR/tree/main",
            "filename": "SUPIR-v0F.ckpt",
            "url": "https://huggingface.co/camenduru/SUPIR/resolve/main/SUPIR-v0F.ckpt"
        },
        {
            "name": "SUPIR-v0Q.ckpt",
            "type": "checkpoints",
            "base": "SUPIR",
            "save_path": "checkpoints/SUPIR",
            "description": "SUPIR checkpoint model",
            "reference": "https://huggingface.co/camenduru/SUPIR/tree/main",
            "filename": "SUPIR-v0Q.ckpt",
            "url": "https://huggingface.co/camenduru/SUPIR/resolve/main/SUPIR-v0Q.ckpt"
        },
        {
            "name": "Depth-FM-v1 fp16 safetensors",
            "type": "checkpoints",
            "base": "Depth-FM",
            "save_path": "checkpoints/depthfm",
            "description": "Depth-FM monocular depth estimation model",
            "reference": "https://huggingface.co/Kijai/depth-fm-pruned",
            "filename": "depthfm-v1_fp16.safetensors",
            "url": "https://huggingface.co/Kijai/depth-fm-pruned/resolve/main/depthfm-v1_fp16.safetensors"
        },
        {
            "name": "Depth-FM-v1 fp32 safetensors",
            "type": "checkpoints",
            "base": "Depth-FM",
            "save_path": "checkpoints/depthfm",
            "description": "Depth-FM monocular depth estimation model",
            "reference": "https://huggingface.co/Kijai/depth-fm-pruned",
            "filename": "depthfm-v1_fp32.safetensors",
            "url": "https://huggingface.co/Kijai/depth-fm-pruned/resolve/main/depthfm-v1_fp32.safetensors"
        },
        {
            "name": "monster-labs - Controlnet QR Code Monster v1 For SDXL",
            "type": "controlnet",
            "base": "SDXL",
            "save_path": "default",
            "description": "monster-labs - Controlnet QR Code Monster v1 For SDXL",
            "reference": "https://huggingface.co/monster-labs/control_v1p_sdxl_qrcode_monster",
            "filename": "control_v1p_sdxl_qrcode_monster.safetensors",
            "url": "https://huggingface.co/monster-labs/control_v1p_sdxl_qrcode_monster/resolve/main/diffusion_pytorch_model.safetensors"
        },
        {
            "name": "DynamiCrafter 1024 bf16 safetensors",
            "type": "checkpoints",
            "base": "DynamiCrafter",
            "save_path": "checkpoints/dynamicrafter",
            "description": "DynamiCrafter image2video model 1024x576",
            "reference": "https://huggingface.co/Kijai/DynamiCrafter_pruned/",
            "filename": "dynamicrafter_1024_v1_bf16.safetensors",
            "url": "https://huggingface.co/Kijai/DynamiCrafter_pruned/resolve/main/dynamicrafter_1024_v1_bf16.safetensors"
        },
        {
            "name": "DynamiCrafter 512 interpolation bf16 safetensors",
            "type": "checkpoints",
            "base": "DynamiCrafter",
            "save_path": "checkpoints/dynamicrafter",
            "description": "DynamiCrafter image2video interpolation model 512",
            "reference": "https://huggingface.co/Kijai/DynamiCrafter_pruned/",
            "filename": "dynamicrafter_512_interp_v1_bf16.safetensors",
            "url": "https://huggingface.co/Kijai/DynamiCrafter_pruned/resolve/main/dynamicrafter_512_interp_v1_bf16.safetensors"
        },
        {
            "name": "MobileSAM",
            "type": "sam",
            "base": "SAM",
            "save_path": "sams",
            "description": "MobileSAM",
            "reference": "https://github.com/ChaoningZhang/MobileSAM/",
            "filename": "mobile_sam.pt",
            "url": "https://github.com/ChaoningZhang/MobileSAM/raw/master/weights/mobile_sam.pt"
        },

        {
            "name": "BLIP ImageCaption (COCO) w/ ViT-B and CapFilt-L",
            "type": "BLIP_MODEL",
            "base": "blip_model",
            "save_path": "blip",
            "description": "BLIP ImageCaption (COCO) w/ ViT-B and CapFilt-L",
            "reference": "https://github.com/salesforce/BLIP",
            "filename": "model_base_capfilt_large.pth",
            "url": "https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_capfilt_large.pth"
        },
        {
            "name": "GroundingDINO SwinT OGC - Model",
            "type": "GroundingDINO",
            "base": "DINO",
            "save_path": "groundingdino",
            "description": "GroundingDINO SwinT OGC Model",
            "reference": "https://huggingface.co/ShilongLiu/GroundingDINO",
            "filename": "groundingdino_swint_ogc.pth",
            "url": "https://huggingface.co/ShilongLiu/GroundingDINO/resolve/main/groundingdino_swint_ogc.pth"
        },
        {
            "name": "GroundingDINO SwinT OGC - CFG File",
            "type": "GroundingDINO",
            "base": "DINO",
            "save_path": "groundingdino",
            "description": "GroundingDINO SwinT OGC CFG File",
            "reference": "https://huggingface.co/ShilongLiu/GroundingDINO/resolve/main/GroundingDINO_SwinT_OGC.cfg.py",
            "filename": "GroundingDINO_SwinT_OGC.cfg.py",
            "url": "https://huggingface.co/ShilongLiu/GroundingDINO/raw/main/GroundingDINO_SwinT_OGC.cfg.py"
        },
        {
            "name": "SDXL Lightning LoRA (2step)",
            "type": "lora",
            "base": "SDXL",
            "save_path": "loras/SDXL-Lightning",
            "description": "SDXL Lightning LoRA (2step)",
            "reference": "https://huggingface.co/ByteDance/SDXL-Lightning",
            "filename": "sdxl_lightning_2step_lora.safetensors",
            "url": "https://huggingface.co/ByteDance/SDXL-Lightning/resolve/main/sdxl_lightning_2step_lora.safetensors"
        },
        {
            "name": "SDXL Lightning LoRA (4step)",
            "type": "lora",
            "base": "SDXL",
            "save_path": "loras/SDXL-Lightning",
            "description": "SDXL Lightning LoRA (4step)",
            "reference": "https://huggingface.co/ByteDance/SDXL-Lightning",
            "filename": "sdxl_lightning_4step_lora.safetensors",
            "url": "https://huggingface.co/ByteDance/SDXL-Lightning/resolve/main/sdxl_lightning_4step_lora.safetensors"
        },
        {
            "name": "SDXL Lightning LoRA (8step)",
            "type": "lora",
            "base": "SDXL",
            "save_path": "loras/SDXL-Lightning",
            "description": "SDXL Lightning LoRA (8step)",
            "reference": "https://huggingface.co/ByteDance/SDXL-Lightning",
            "filename": "sdxl_lightning_8step_lora.safetensors",
            "url": "https://huggingface.co/ByteDance/SDXL-Lightning/resolve/main/sdxl_lightning_8step_lora.safetensors"
        },

        {
            "name": "shape_predictor_68_face_landmarks.dat [Face Analysis]",
            "type": "Shape Predictor",
            "base": "DLIB",
            "save_path": "custom_nodes/ComfyUI_FaceAnalysis/dlib",
            "description": "To use the Face Analysis for ComfyUI custom node, installation of this model is needed.",
            "reference": "https://huggingface.co/matt3ounstable/dlib_predictor_recognition/tree/main",
            "filename": "shape_predictor_68_face_landmarks.dat",
            "url": "https://huggingface.co/matt3ounstable/dlib_predictor_recognition/resolve/main/shape_predictor_68_face_landmarks.dat"
        },
        {
            "name": "dlib_face_recognition_resnet_model_v1.dat [Face Analysis]",
            "type": "Face Recognition",
            "base": "DLIB",
            "save_path": "custom_nodes/ComfyUI_FaceAnalysis/dlib",
            "description": "To use the Face Analysis for ComfyUI custom node, installation of this model is needed.",
            "reference": "https://huggingface.co/matt3ounstable/dlib_predictor_recognition/tree/main",
            "filename": "dlib_face_recognition_resnet_model_v1.dat",
            "url": "https://huggingface.co/matt3ounstable/dlib_predictor_recognition/resolve/main/dlib_face_recognition_resnet_model_v1.dat"
        },

        {
            "name": "efficient_sam_s_cpu.jit [ComfyUI-YoloWorld-EfficientSAM]",
            "type": "efficient_sam",
            "base": "efficient_sam",
            "save_path": "custom_nodes/ComfyUI-YoloWorld-EfficientSAM",
            "description": "Install efficient_sam_s_cpu.jit into ComfyUI-YoloWorld-EfficientSAM",
            "reference": "https://huggingface.co/camenduru/YoloWorld-EfficientSAM/tree/main",
            "filename": "efficient_sam_s_cpu.jit",
            "url": "https://huggingface.co/camenduru/YoloWorld-EfficientSAM/resolve/main/efficient_sam_s_cpu.jit"
        },
        {
            "name": "efficient_sam_s_gpu.jit [ComfyUI-YoloWorld-EfficientSAM]",
            "type": "efficient_sam",
            "base": "efficient_sam",
            "save_path": "custom_nodes/ComfyUI-YoloWorld-EfficientSAM",
            "description": "Install efficient_sam_s_gpu.jit into ComfyUI-YoloWorld-EfficientSAM",
            "reference": "https://huggingface.co/camenduru/YoloWorld-EfficientSAM/tree/main",
            "filename": "efficient_sam_s_gpu.jit",
            "url": "https://huggingface.co/camenduru/YoloWorld-EfficientSAM/resolve/main/efficient_sam_s_gpu.jit"
        },

        {
            "name": "stabilityai/comfyui_checkpoints/stable_cascade_stage_b.safetensors",
            "type": "checkpoints",
            "base": "Stable Cascade",
            "save_path": "checkpoints/Stable-Cascade",
            "description": "[4.55GB] Stable Cascade stage_b checkpoints",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stable_cascade_stage_b.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/comfyui_checkpoints/stable_cascade_stage_b.safetensors"
        },
        {
            "name": "stabilityai/comfyui_checkpoints/stable_cascade_stage_c.safetensors",
            "type": "checkpoints",
            "base": "Stable Cascade",
            "save_path": "checkpoints/Stable-Cascade",
            "description": "[9.22GB] Stable Cascade stage_c checkpoints",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stable_cascade_stage_c.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/comfyui_checkpoints/stable_cascade_stage_c.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: effnet_encoder.safetensors (VAE)",
            "type": "VAE",
            "base": "Stable Cascade",
            "save_path": "vae/Stable-Cascade",
            "description": "[81.5MB] Stable Cascade: effnet_encoder.\nVAE encoder for stage_c latent.",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "effnet_encoder.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/effnet_encoder.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: stage_a.safetensors (VAE)",
            "type": "VAE",
            "base": "Stable Cascade",
            "save_path": "vae/Stable-Cascade",
            "description": "[73.7MB] Stable Cascade: stage_a",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stage_a.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_a.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: stage_b.safetensors (UNET)",
            "type": "unet",
            "base": "Stable Cascade",
            "save_path": "unet/Stable-Cascade",
            "description": "[6.25GB] Stable Cascade: stage_b",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stage_b.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_b.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: stage_b_bf16.safetensors (UNET)",
            "type": "unet",
            "base": "Stable Cascade",
            "save_path": "unet/Stable-Cascade",
            "description": "[3.13GB] Stable Cascade: stage_b/bf16",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stage_b_bf16.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_b_bf16.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: stage_b_lite.safetensors (UNET)",
            "type": "unet",
            "base": "Stable Cascade",
            "save_path": "unet/Stable-Cascade",
            "description": "[2.8GB] Stable Cascade: stage_b/lite",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stage_b_lite.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_b_lite.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: stage_b_lite_bf16.safetensors (UNET)",
            "type": "unet",
            "base": "Stable Cascade",
            "save_path": "unet/Stable-Cascade",
            "description": "[1.4GB] Stable Cascade: stage_b/bf16,lite",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stage_b_lite_bf16.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_b_lite_bf16.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: stage_c.safetensors (UNET)",
            "type": "unet",
            "base": "Stable Cascade",
            "save_path": "unet/Stable-Cascade",
            "description": "[14.4GB] Stable Cascade: stage_c",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stage_c.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_c.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: stage_c_bf16.safetensors (UNET)",
            "type": "unet",
            "base": "Stable Cascade",
            "save_path": "unet/Stable-Cascade",
            "description": "[7.18GB] Stable Cascade: stage_c/bf16",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stage_c_bf16.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_c_bf16.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: stage_c_lite.safetensors (UNET)",
            "type": "unet",
            "base": "Stable Cascade",
            "save_path": "unet/Stable-Cascade",
            "description": "[4.12GB] Stable Cascade: stage_c/lite",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stage_c_lite.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_c_lite.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: stage_c_lite_bf16.safetensors (UNET)",
            "type": "unet",
            "base": "Stable Cascade",
            "save_path": "unet/Stable-Cascade",
            "description": "[2.06GB] Stable Cascade: stage_c/bf16,lite",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "stage_c_lite_bf16.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_c_lite_bf16.safetensors"
        },
        {
            "name": "stabilityai/Stable Cascade: text_encoder (CLIP)",
            "type": "clip",
            "base": "Stable Cascade",
            "save_path": "clip/Stable-Cascade",
            "description": "[1.39GB] Stable Cascade: text_encoder",
            "reference": "https://huggingface.co/stabilityai/stable-cascade",
            "filename": "model.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/text_encoder/model.safetensors"
        },

        {
            "name": "1k3d68.onnx",
            "type": "insightface",
            "base": "inswapper",
            "save_path": "insightface/models/antelopev2",
            "description": "Antelopev2 1k3d68.onnx model for InstantId. (InstantId needs all Antelopev2 models)",
            "reference": "https://github.com/cubiq/ComfyUI_InstantID#installation",
            "filename": "1k3d68.onnx",
            "url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/1k3d68.onnx"
        },
        {
            "name": "2d106det.onnx",
            "type": "insightface",
            "base": "inswapper",
            "save_path": "insightface/models/antelopev2",
            "description": "Antelopev2 2d106det.onnx model for InstantId. (InstantId needs all Antelopev2 models)",
            "reference": "https://github.com/cubiq/ComfyUI_InstantID#installation",
            "filename": "2d106det.onnx",
            "url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/2d106det.onnx"
        },
        {
            "name": "genderage.onnx",
            "type": "insightface",
            "base": "inswapper",
            "save_path": "insightface/models/antelopev2",
            "description": "Antelopev2 genderage.onnx model for InstantId. (InstantId needs all Antelopev2 models)",
            "reference": "https://github.com/cubiq/ComfyUI_InstantID#installation",
            "filename": "genderage.onnx",
            "url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/genderage.onnx"
        },
        {
            "name": "glintr100.onnx",
            "type": "insightface",
            "base": "inswapper",
            "save_path": "insightface/models/antelopev2",
            "description": "Antelopev2 glintr100.onnx model for InstantId. (InstantId needs all Antelopev2 models)",
            "reference": "https://github.com/cubiq/ComfyUI_InstantID#installation",
            "filename": "glintr100.onnx",
            "url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/glintr100.onnx"
        },
        {
            "name": "scrfd_10g_bnkps.onnx",
            "type": "insightface",
            "base": "inswapper",
            "save_path": "insightface/models/antelopev2",
            "description": "Antelopev2 scrfd_10g_bnkps.onnx model for InstantId. (InstantId needs all Antelopev2 models)",
            "reference": "https://github.com/cubiq/ComfyUI_InstantID#installation",
            "filename": "scrfd_10g_bnkps.onnx",
            "url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/scrfd_10g_bnkps.onnx"
        },

        {
            "name": "photomaker-v1.bin",
            "type": "photomaker",
            "base": "SDXL",
            "save_path": "photomaker",
            "description": "PhotoMaker model. This model is compatible with SDXL.",
            "reference": "https://huggingface.co/TencentARC/PhotoMaker",
            "filename": "photomaker-v1.bin",
            "url": "https://huggingface.co/TencentARC/PhotoMaker/resolve/main/photomaker-v1.bin"
        },
        {
            "name": "ip-adapter-faceid_sdxl.bin",
            "type": "IP-Adapter",
            "base": "SDXL",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID Model (SDXL) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid_sdxl.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sdxl.bin"
        },
        {
            "name": "ip-adapter-faceid-plusv2_sdxl.bin",
            "type": "IP-Adapter",
            "base": "SDXL",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID-Plus V2 Model (SDXL) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plusv2_sdxl.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sdxl.bin"
        },
        {
            "name": "ip-adapter-faceid_sdxl_lora.safetensors",
            "type": "lora",
            "base": "SDXL",
            "save_path": "loras/ipadapter",
            "description": "IP-Adapter-FaceID LoRA Model (SDXL) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid_sdxl_lora.safetensors",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sdxl_lora.safetensors"
        },
        {
            "name": "ip-adapter-faceid-plusv2_sdxl_lora.safetensors",
            "type": "lora",
            "base": "SDXL",
            "save_path": "loras/ipadapter",
            "description": "IP-Adapter-FaceID-Plus V2 LoRA Model (SDXL) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plusv2_sdxl_lora.safetensors",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sdxl_lora.safetensors"
        },

        {
            "name": "TencentARC/motionctrl.pth",
            "type": "checkpoints",
            "base": "MotionCtrl",
            "save_path": "checkpoints/motionctrl",
            "description": "To use the ComfyUI-MotionCtrl extension, downloading this model is required.",
            "reference": "https://huggingface.co/TencentARC/MotionCtrl",
            "filename": "motionctrl.pth",
            "url": "https://huggingface.co/TencentARC/MotionCtrl/resolve/main/motionctrl.pth"
        },
        {
            "name": "ip-adapter-faceid-plusv2_sd15.bin",
            "type": "IP-Adapter",
            "base": "SD1.5",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID-Plus V2 Model (SD1.5)",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plusv2_sd15.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sd15.bin"
        },
        {
            "name": "ip-adapter-faceid-plusv2_sd15_lora.safetensors",
            "type": "lora",
            "base": "SD1.5",
            "save_path": "loras/ipadapter",
            "description": "IP-Adapter-FaceID-Plus V2 LoRA Model (SD1.5)",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plusv2_sd15_lora.safetensors",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sd15_lora.safetensors"
        },
        {
            "name": "ip-adapter-faceid-plus_sd15_lora.safetensors",
            "type": "lora",
            "base": "SD1.5",
            "save_path": "loras/ipadapter",
            "description": "IP-Adapter-FaceID Plus LoRA Model (SD1.5) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plus_sd15_lora.safetensors",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plus_sd15_lora.safetensors"
        },

        {
            "name": "ControlNet-HandRefiner-pruned (inpaint-depth-hand; fp16)",
            "type": "controlnet",
            "base": "SD1.5",
            "save_path": "default",
            "description": "This inpaint-depth controlnet model is specialized for the hand refiner.",
            "reference": "https://huggingface.co/hr16/ControlNet-HandRefiner-pruned",
            "filename": "control_sd15_inpaint_depth_hand_fp16.safetensors",
            "url": "https://huggingface.co/hr16/ControlNet-HandRefiner-pruned/resolve/main/control_sd15_inpaint_depth_hand_fp16.safetensors"
        },
        {
            "name": "stabilityai/stable-diffusion-x4-upscaler",
            "type": "checkpoints",
            "base": "upscale",
            "save_path": "checkpoints/upscale",
            "description": "[3.53GB] This upscaling model is a latent text-guided diffusion model and should be used with SD_4XUpscale_Conditioning and KSampler.",
            "reference": "https://huggingface.co/stabilityai/stable-diffusion-x4-upscaler",
            "filename": "x4-upscaler-ema.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-diffusion-x4-upscaler/resolve/main/x4-upscaler-ema.safetensors"
        },
        {
            "name": "LDSR (Latent Diffusion Super Resolution)",
            "type": "upscale",
            "base": "upscale",
            "save_path": "upscale_models/ldsr",
            "description": "LDSR upscale model. This model can be used through the [a/ComfyUI-Flowty-LDSR](https://github.com/flowtyone/ComfyUI-Flowty-LDSR) extension.",
            "reference": "https://github.com/CompVis/latent-diffusion",
            "filename": "last.ckpt",
            "url": "https://heibox.uni-heidelberg.de/f/578df07c8fc04ffbadf3/?dl=1"
        },
        {
            "name": "control_boxdepth_LooseControlfp16 (fp16)",
            "type": "controlnet",
            "base": "SD1.5",
            "save_path": "default",
            "description": "Loose ControlNet model",
            "reference": "https://huggingface.co/ioclab/LooseControl_WebUICombine",
            "filename": "control_boxdepth_LooseControlfp16.safetensors",
            "url": "https://huggingface.co/ioclab/LooseControl_WebUICombine/resolve/main/control_boxdepth_LooseControlfp16.safetensors"
        },

        {
            "name": "ip-adapter-faceid-portrait_sd15.bin",
            "type": "IP-Adapter",
            "base": "SD1.5",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID Portrait Model (SD1.5) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-portrait_sd15.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait_sd15.bin"
        },
        {
            "name": "ip-adapter-faceid-plus_sd15.bin",
            "type": "IP-Adapter",
            "base": "SD1.5",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID Plus Model (SD1.5) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plus_sd15.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plus_sd15.bin"
        },
        {
            "name": "ip-adapter-faceid_sd15.bin",
            "type": "IP-Adapter",
            "base": "SD1.5",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID Model (SD1.5)",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid_sd15.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sd15.bin"
        },
        {
            "name": "ip-adapter-faceid_sd15_lora.safetensors",
            "type": "lora",
            "base": "SD1.5",
            "save_path": "loras/ipadapter",
            "description": "IP-Adapter-FaceID LoRA Model (SD1.5)",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid_sd15_lora.safetensors",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sd15_lora.safetensors"
        }
    ]
}