{
    "models": [
        {
            "name": "SUPIR-v0F.ckpt",
            "type": "checkpoints",
            "base": "SUPIR",
            "save_path": "checkpoints/SUPIR",
            "description": "SUPIR checkpoint model",
            "reference": "https://huggingface.co/camenduru/SUPIR/tree/main",
            "filename": "SUPIR-v0F.ckpt",
            "url": "https://huggingface.co/camenduru/SUPIR/resolve/main/SUPIR-v0F.ckpt"
        },
        {
            "name": "SUPIR-v0Q.ckpt",
            "type": "checkpoints",
            "base": "SUPIR",
            "save_path": "checkpoints/SUPIR",
            "description": "SUPIR checkpoint model",
            "reference": "https://huggingface.co/camenduru/SUPIR/tree/main",
            "filename": "SUPIR-v0Q.ckpt",
            "url": "https://huggingface.co/camenduru/SUPIR/resolve/main/SUPIR-v0Q.ckpt"
        },
        {
            "name": "Depth-FM-v1 fp16 safetensors",
            "type": "checkpoints",
            "base": "Depth-FM",
            "save_path": "checkpoints/depthfm",
            "description": "Depth-FM monocular depth estimation model",
            "reference": "https://huggingface.co/Kijai/depth-fm-pruned",
            "filename": "depthfm-v1_fp16.safetensors",
            "url": "https://huggingface.co/Kijai/depth-fm-pruned/resolve/main/depthfm-v1_fp16.safetensors"
        },
        {
            "name": "Depth-FM-v1 fp32 safetensors",
            "type": "checkpoints",
            "base": "Depth-FM",
            "save_path": "checkpoints/depthfm",
            "description": "Depth-FM monocular depth estimation model",
            "reference": "https://huggingface.co/Kijai/depth-fm-pruned",
            "filename": "depthfm-v1_fp32.safetensors",
            "url": "https://huggingface.co/Kijai/depth-fm-pruned/resolve/main/depthfm-v1_fp32.safetensors"
        },
        {
            "name": "monster-labs - Controlnet QR Code Monster v1 For SDXL",
            "type": "controlnet",
            "base": "SDXL",
            "save_path": "default",
            "description": "monster-labs - Controlnet QR Code Monster v1 For SDXL",
            "reference": "https://huggingface.co/monster-labs/control_v1p_sdxl_qrcode_monster",
            "filename": "control_v1p_sdxl_qrcode_monster.safetensors",
            "url": "https://huggingface.co/monster-labs/control_v1p_sdxl_qrcode_monster/resolve/main/diffusion_pytorch_model.safetensors"
        },
        {
            "name": "DynamiCrafter 1024 bf16 safetensors",
            "type": "checkpoints",
            "base": "DynamiCrafter",
            "save_path": "checkpoints/dynamicrafter",
            "description": "DynamiCrafter image2video model 1024x575",
            "reference": "https://huggingface.co/Kijai/DynamiCrafter_pruned/",
            "filename": "dynamicrafter_1024_v1_bf16.safetensors",
            "url": "https://huggingface.co/Kijai/DynamiCrafter_pruned/resolve/main/dynamicrafter_1024_v1_bf16.safetensors"
        },
        {
            "name": "DynamiCrafter 512 interpolation bf16 safetensors",
            "type": "checkpoints",
            "base": "DynamiCrafter",
            "save_path": "checkpoints/dynamicrafter",
            "description": "DynamiCrafter image2video interpolation model 512",
            "reference": "https://huggingface.co/Kijai/DynamiCrafter_pruned/",
            "filename": "dynamicrafter_512_interp_v1_bf16.safetensors",
            "url": "https://huggingface.co/Kijai/DynamiCrafter_pruned/resolve/main/dynamicrafter_512_interp_v1_bf16.safetensors"
        },
        {
            "name": "MobileSAM",
            "type": "sam",
            "base": "SAM",
            "save_path": "sams",
            "description": "MobileSAM",
            "reference": "https://github.com/ChaoningZhang/MobileSAM/",
            "filename": "mobile_sam.pt",
"url": "https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt" |
|
        },
        {
            "name": "BLIP ImageCaption (COCO) w/ ViT-B and CapFilt-L",
            "type": "BLIP_MODEL",
            "base": "blip_model",
            "save_path": "blip",
            "description": "BLIP ImageCaption (COCO) w/ ViT-B and CapFilt-L",
            "reference": "https://github.com/salesforce/BLIP",
            "filename": "model_base_capfilt_large.pth",
            "url": "https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_capfilt_large.pth"
        },
        {
            "name": "GroundingDINO SwinT OGC - Model",
            "type": "GroundingDINO",
            "base": "DINO",
            "save_path": "groundingdino",
            "description": "GroundingDINO SwinT OGC Model",
            "reference": "https://huggingface.co/ShilongLiu/GroundingDINO",
            "filename": "groundingdino_swint_ogc.pth",
            "url": "https://huggingface.co/ShilongLiu/GroundingDINO/resolve/main/groundingdino_swint_ogc.pth"
        },
        {
            "name": "GroundingDINO SwinT OGC - CFG File",
            "type": "GroundingDINO",
            "base": "DINO",
            "save_path": "groundingdino",
            "description": "GroundingDINO SwinT OGC CFG File",
            "reference": "https://huggingface.co/ShilongLiu/GroundingDINO/resolve/main/GroundingDINO_SwinT_OGC.cfg.py",
            "filename": "GroundingDINO_SwinT_OGC.cfg.py",
            "url": "https://huggingface.co/ShilongLiu/GroundingDINO/raw/main/GroundingDINO_SwinT_OGC.cfg.py"
        },
        {
            "name": "SDXL Lightning LoRA (2step)",
            "type": "lora",
            "base": "SDXL",
            "save_path": "loras/SDXL-Lightning",
            "description": "SDXL Lightning LoRA (2step)",
            "reference": "https://huggingface.co/ByteDance/SDXL-Lightning",
            "filename": "sdxl_lightning_2step_lora.safetensors",
            "url": "https://huggingface.co/ByteDance/SDXL-Lightning/resolve/main/sdxl_lightning_2step_lora.safetensors"
        },
        {
            "name": "SDXL Lightning LoRA (4step)",
            "type": "lora",
            "base": "SDXL",
            "save_path": "loras/SDXL-Lightning",
            "description": "SDXL Lightning LoRA (4step)",
            "reference": "https://huggingface.co/ByteDance/SDXL-Lightning",
            "filename": "sdxl_lightning_4step_lora.safetensors",
            "url": "https://huggingface.co/ByteDance/SDXL-Lightning/resolve/main/sdxl_lightning_4step_lora.safetensors"
        },
        {
            "name": "SDXL Lightning LoRA (8step)",
            "type": "lora",
            "base": "SDXL",
            "save_path": "loras/SDXL-Lightning",
"description": "SDXL Lightning LoRA (8tep)", |
|
"reference": "https://huggingface.co/ByteDance/SDXL-Lightning", |
|
"filename": "sdxl_lightning_8step_lora.safetensors", |
|
"url": "https://huggingface.co/ByteDance/SDXL-Lightning/resolve/main/sdxl_lightning_8step_lora.safetensors" |
|
}, |
|
|
|
{ |
|
"name": "shape_predictor_68_face_landmarks.dat [Face Analysis]", |
|
"type": "Shape Predictor", |
|
"base": "DLIB", |
|
"save_path": "custom_nodes/ComfyUI_FaceAnalysis/dlib", |
|
"description": "To use the Face Analysis for ComfyUI custom node, installation of this model is needed.", |
|
"reference": "https://huggingface.co/matt3ounstable/dlib_predictor_recognition/tree/main", |
|
"filename": "shape_predictor_68_face_landmarks.dat", |
|
"url": "https://huggingface.co/matt3ounstable/dlib_predictor_recognition/resolve/main/shape_predictor_68_face_landmarks.dat" |
|
}, |
|
{ |
|
"name": "dlib_face_recognition_resnet_model_v1.dat [Face Analysis]", |
|
"type": "Face Recognition", |
|
"base": "DLIB", |
|
"save_path": "custom_nodes/ComfyUI_FaceAnalysis/dlib", |
|
"description": "To use the Face Analysis for ComfyUI custom node, installation of this model is needed.", |
|
"reference": "https://huggingface.co/matt3ounstable/dlib_predictor_recognition/tree/main", |
|
"filename": "dlib_face_recognition_resnet_model_v1.dat", |
|
"url": "https://huggingface.co/matt3ounstable/dlib_predictor_recognition/resolve/main/dlib_face_recognition_resnet_model_v1.dat" |
|
}, |
|
|
|
{ |
|
"name": "efficient_sam_s_cpu.jit [ComfyUI-YoloWorld-EfficientSAM]", |
|
"type": "efficient_sam", |
|
"base": "efficient_sam", |
|
"save_path": "custom_nodes/ComfyUI-YoloWorld-EfficientSAM", |
|
"description": "Install efficient_sam_s_cpu.jit into ComfyUI-YoloWorld-EfficientSAM", |
|
"reference": "https://huggingface.co/camenduru/YoloWorld-EfficientSAM/tree/main", |
|
"filename": "efficient_sam_s_cpu.jit", |
|
"url": "https://huggingface.co/camenduru/YoloWorld-EfficientSAM/resolve/main/efficient_sam_s_cpu.jit" |
|
}, |
|
{ |
|
"name": "efficient_sam_s_gpu.jit [ComfyUI-YoloWorld-EfficientSAM]", |
|
"type": "efficient_sam", |
|
"base": "efficient_sam", |
|
"save_path": "custom_nodes/ComfyUI-YoloWorld-EfficientSAM", |
|
"description": "Install efficient_sam_s_gpu.jit into ComfyUI-YoloWorld-EfficientSAM", |
|
"reference": "https://huggingface.co/camenduru/YoloWorld-EfficientSAM/tree/main", |
|
"filename": "efficient_sam_s_gpu.jit", |
|
"url": "https://huggingface.co/camenduru/YoloWorld-EfficientSAM/resolve/main/efficient_sam_s_gpu.jit" |
|
}, |
|
|
|
{ |
|
"name": "stabilityai/comfyui_checkpoints/stable_cascade_stage_b.safetensors", |
|
"type": "checkpoints", |
|
"base": "Stable Cascade", |
|
"save_path": "checkpoints/Stable-Cascade", |
|
"description": "[4.55GB] Stable Cascade stage_b checkpoints", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stable_cascade_stage_b.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/comfyui_checkpoints/stable_cascade_stage_b.safetensors" |
|
}, |
|
{ |
|
"name": "stabilityai/comfyui_checkpoints/stable_cascade_stage_c.safetensors", |
|
"type": "checkpoints", |
|
"base": "Stable Cascade", |
|
"save_path": "checkpoints/Stable-Cascade", |
|
"description": "[9.22GB] Stable Cascade stage_c checkpoints", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stable_cascade_stage_c.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/comfyui_checkpoints/stable_cascade_stage_c.safetensors" |
|
}, |
|
{ |
|
"name": "stabilityai/Stable Cascade: effnet_encoder.safetensors (VAE)", |
|
"type": "VAE", |
|
"base": "Stable Cascade", |
|
"save_path": "vae/Stable-Cascade", |
|
"description": "[81.5MB] Stable Cascade: effnet_encoder.\nVAE encoder for stage_c latent.", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "effnet_encoder.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/effnet_encoder.safetensors" |
|
}, |
|
{ |
|
"name": "stabilityai/Stable Cascade: stage_a.safetensors (VAE)", |
|
"type": "VAE", |
|
"base": "Stable Cascade", |
|
"save_path": "vae/Stable-Cascade", |
|
"description": "[73.7MB] Stable Cascade: stage_a", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stage_a.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_a.safetensors" |
|
}, |
|
{ |
|
"name": "stabilityai/Stable Cascade: stage_b.safetensors (UNET)", |
|
"type": "unet", |
|
"base": "Stable Cascade", |
|
"save_path": "unet/Stable-Cascade", |
|
"description": "[6.25GB] Stable Cascade: stage_b", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stage_b.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_b.safetensors" |
|
}, |
|
{ |
|
"name": "stabilityai/Stable Cascade: stage_b_bf16.safetensors (UNET)", |
|
"type": "unet", |
|
"base": "Stable Cascade", |
|
"save_path": "unet/Stable-Cascade", |
|
"description": "[3.13GB] Stable Cascade: stage_b/bf16", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stage_b_bf16.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_b_bf16.safetensors" |
|
}, |
|
{ |
|
"name": "stabilityai/Stable Cascade: stage_b_lite.safetensors (UNET)", |
|
"type": "unet", |
|
"base": "Stable Cascade", |
|
"save_path": "unet/Stable-Cascade", |
|
"description": "[2.8GB] Stable Cascade: stage_b/lite", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stage_b_lite.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_b_lite.safetensors" |
|
}, |
|
        {
            "name": "stabilityai/Stable Cascade: stage_b_lite_bf16.safetensors (UNET)",
"type": "unet", |
|
"base": "Stable Cascade", |
|
"save_path": "unet/Stable-Cascade", |
|
"description": "[1.4GB] Stable Cascade: stage_b/bf16,lite", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stage_b_lite_bf16.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_b_lite_bf16.safetensors" |
|
}, |
|
{ |
|
"name": "stabilityai/Stable Cascade: stage_c.safetensors (UNET)", |
|
"type": "unet", |
|
"base": "Stable Cascade", |
|
"save_path": "unet/Stable-Cascade", |
|
"description": "[14.4GB] Stable Cascade: stage_c", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stage_c.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_c.safetensors" |
|
}, |
|
{ |
|
"name": "stabilityai/Stable Cascade: stage_c_bf16.safetensors (UNET)", |
|
"type": "unet", |
|
"base": "Stable Cascade", |
|
"save_path": "unet/Stable-Cascade", |
|
"description": "[7.18GB] Stable Cascade: stage_c/bf16", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stage_c_bf16.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_c_bf16.safetensors" |
|
}, |
|
{ |
|
"name": "stabilityai/Stable Cascade: stage_c_lite.safetensors (UNET)", |
|
"type": "unet", |
|
"base": "Stable Cascade", |
|
"save_path": "unet/Stable-Cascade", |
|
"description": "[4.12GB] Stable Cascade: stage_c/lite", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stage_c_lite.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_c_lite.safetensors" |
|
}, |
|
        {
            "name": "stabilityai/Stable Cascade: stage_c_lite_bf16.safetensors (UNET)",
"type": "unet", |
|
"base": "Stable Cascade", |
|
"save_path": "unet/Stable-Cascade", |
|
"description": "[2.06GB] Stable Cascade: stage_c/bf16,lite", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "stage_c_lite_bf16.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/stage_c_lite_bf16.safetensors" |
|
}, |
|
{ |
|
"name": "stabilityai/Stable Cascade: text_encoder (CLIP)", |
|
"type": "clip", |
|
"base": "Stable Cascade", |
|
"save_path": "clip/Stable-Cascade", |
|
"description": "[1.39GB] Stable Cascade: text_encoder", |
|
"reference": "https://huggingface.co/stabilityai/stable-cascade", |
|
"filename": "model.safetensors", |
|
"url": "https://huggingface.co/stabilityai/stable-cascade/resolve/main/text_encoder/model.safetensors" |
|
}, |
|
|
|
{ |
|
"name": "1k3d68.onnx", |
|
"type": "insightface", |
|
"base": "inswapper", |
|
"save_path": "insightface/models/antelopev2", |
|
"description": "Antelopev2 1k3d68.onnx model for InstantId. (InstantId needs all Antelopev2 models)", |
|
"reference": "https://github.com/cubiq/ComfyUI_InstantID#installation", |
|
"filename": "1k3d68.onnx", |
|
"url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/1k3d68.onnx" |
|
}, |
|
{ |
|
"name": "2d106det.onnx", |
|
"type": "insightface", |
|
"base": "inswapper", |
|
"save_path": "insightface/models/antelopev2", |
|
"description": "Antelopev2 2d106det.onnx model for InstantId. (InstantId needs all Antelopev2 models)", |
|
"reference": "https://github.com/cubiq/ComfyUI_InstantID#installation", |
|
"filename": "2d106det.onnx", |
|
"url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/2d106det.onnx" |
|
}, |
|
{ |
|
"name": "genderage.onnx", |
|
"type": "insightface", |
|
"base": "inswapper", |
|
"save_path": "insightface/models/antelopev2", |
|
"description": "Antelopev2 genderage.onnx model for InstantId. (InstantId needs all Antelopev2 models)", |
|
"reference": "https://github.com/cubiq/ComfyUI_InstantID#installation", |
|
"filename": "genderage.onnx", |
|
"url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/genderage.onnx" |
|
}, |
|
{ |
|
"name": "glintr100.onnx", |
|
"type": "insightface", |
|
"base": "inswapper", |
|
"save_path": "insightface/models/antelopev2", |
|
"description": "Antelopev2 glintr100.onnx model for InstantId. (InstantId needs all Antelopev2 models)", |
|
"reference": "https://github.com/cubiq/ComfyUI_InstantID#installation", |
|
"filename": "glintr100.onnx", |
|
"url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/glintr100.onnx" |
|
}, |
|
{ |
|
"name": "scrfd_10g_bnkps.onnx", |
|
"type": "insightface", |
|
"base": "inswapper", |
|
"save_path": "insightface/models/antelopev2", |
|
"description": "Antelopev2 scrfd_10g_bnkps.onnx model for InstantId. (InstantId needs all Antelopev2 models)", |
|
"reference": "https://github.com/cubiq/ComfyUI_InstantID#installation", |
|
"filename": "scrfd_10g_bnkps.onnx", |
|
"url": "https://huggingface.co/MonsterMMORPG/tools/resolve/main/scrfd_10g_bnkps.onnx" |
|
}, |
|
|
|
{ |
|
"name": "photomaker-v1.bin", |
|
"type": "photomaker", |
|
"base": "SDXL", |
|
"save_path": "photomaker", |
|
"description": "PhotoMaker model. This model is compatible with SDXL.", |
|
"reference": "https://huggingface.co/TencentARC/PhotoMaker", |
|
"filename": "photomaker-v1.bin", |
|
"url": "https://huggingface.co/TencentARC/PhotoMaker/resolve/main/photomaker-v1.bin" |
|
}, |
|
        {
            "name": "ip-adapter-faceid_sdxl.bin",
            "type": "IP-Adapter",
            "base": "SDXL",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID Model (SDXL) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid_sdxl.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sdxl.bin"
        },
        {
            "name": "ip-adapter-faceid-plusv2_sdxl.bin",
            "type": "IP-Adapter",
            "base": "SDXL",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID Plus V2 Model (SDXL) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plusv2_sdxl.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sdxl.bin"
        },
        {
            "name": "ip-adapter-faceid_sdxl_lora.safetensors",
            "type": "lora",
            "base": "SDXL",
            "save_path": "loras/ipadapter",
            "description": "IP-Adapter-FaceID LoRA Model (SDXL) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid_sdxl_lora.safetensors",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sdxl_lora.safetensors"
        },
        {
            "name": "ip-adapter-faceid-plusv2_sdxl_lora.safetensors",
            "type": "lora",
            "base": "SDXL",
            "save_path": "loras/ipadapter",
            "description": "IP-Adapter-FaceID-Plus V2 LoRA Model (SDXL) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plusv2_sdxl_lora.safetensors",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sdxl_lora.safetensors"
        },
        {
            "name": "TencentARC/motionctrl.pth",
            "type": "checkpoints",
            "base": "MotionCtrl",
            "save_path": "checkpoints/motionctrl",
            "description": "To use the ComfyUI-MotionCtrl extension, downloading this model is required.",
            "reference": "https://huggingface.co/TencentARC/MotionCtrl",
            "filename": "motionctrl.pth",
            "url": "https://huggingface.co/TencentARC/MotionCtrl/resolve/main/motionctrl.pth"
        },
        {
            "name": "ip-adapter-faceid-plusv2_sd15.bin",
            "type": "IP-Adapter",
            "base": "SD1.5",
            "save_path": "ipadapter",
            "description": "IP-Adapter-FaceID-Plus V2 Model (SD1.5)",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plusv2_sd15.bin",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sd15.bin"
        },
        {
            "name": "ip-adapter-faceid-plusv2_sd15_lora.safetensors",
            "type": "lora",
            "base": "SD1.5",
            "save_path": "loras/ipadapter",
            "description": "IP-Adapter-FaceID-Plus V2 LoRA Model (SD1.5)",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plusv2_sd15_lora.safetensors",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plusv2_sd15_lora.safetensors"
        },
        {
            "name": "ip-adapter-faceid-plus_sd15_lora.safetensors",
            "type": "lora",
            "base": "SD1.5",
            "save_path": "loras/ipadapter",
            "description": "IP-Adapter-FaceID Plus LoRA Model (SD1.5) [ipadapter]",
            "reference": "https://huggingface.co/h94/IP-Adapter-FaceID",
            "filename": "ip-adapter-faceid-plus_sd15_lora.safetensors",
            "url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plus_sd15_lora.safetensors"
        },
        {
            "name": "ControlNet-HandRefiner-pruned (inpaint-depth-hand; fp16)",
            "type": "controlnet",
            "base": "SD1.5",
            "save_path": "default",
            "description": "This inpaint-depth controlnet model is specialized for the hand refiner.",
            "reference": "https://huggingface.co/hr16/ControlNet-HandRefiner-pruned",
            "filename": "control_sd15_inpaint_depth_hand_fp16.safetensors",
            "url": "https://huggingface.co/hr16/ControlNet-HandRefiner-pruned/resolve/main/control_sd15_inpaint_depth_hand_fp16.safetensors"
        },
        {
            "name": "stabilityai/stable-diffusion-x4-upscaler",
            "type": "checkpoints",
            "base": "upscale",
            "save_path": "checkpoints/upscale",
            "description": "[3.53GB] This upscaling model is a latent text-guided diffusion model and should be used with SD_4XUpscale_Conditioning and KSampler.",
            "reference": "https://huggingface.co/stabilityai/stable-diffusion-x4-upscaler",
            "filename": "x4-upscaler-ema.safetensors",
            "url": "https://huggingface.co/stabilityai/stable-diffusion-x4-upscaler/resolve/main/x4-upscaler-ema.safetensors"
        },
        {
"name": "LDSR(Latent Diffusion Super Resolution)", |
|
"type": "upscale", |
|
"base": "upscale", |
|
"save_path": "upscale_models/ldsr", |
|
"description": "LDSR upscale model. Through the [a/ComfyUI-Flowty-LDSR](https://github.com/flowtyone/ComfyUI-Flowty-LDSR) extension, the upscale model can be utilized.", |
|
"reference": "https://github.com/CompVis/latent-diffusion", |
|
"filename": "last.ckpt", |
|
"url": "https://heibox.uni-heidelberg.de/f/578df07c8fc04ffbadf3/?dl=1" |
|
}, |
|
{ |
|
"name": "control_boxdepth_LooseControlfp16 (fp16)", |
|
"type": "controlnet", |
|
"base": "SD1.5", |
|
"save_path": "default", |
|
"description": "Loose ControlNet model", |
|
"reference": "https://huggingface.co/ioclab/LooseControl_WebUICombine", |
|
"filename": "control_boxdepth_LooseControlfp16.safetensors", |
|
"url": "https://huggingface.co/ioclab/LooseControl_WebUICombine/resolve/main/control_boxdepth_LooseControlfp16.safetensors" |
|
}, |
|
|
|
{ |
|
"name": "ip-adapter-faceid-portrait_sd15.bin", |
|
"type": "IP-Adapter", |
|
"base": "SD1.5", |
|
"save_path": "ipadapter", |
|
"description": "IP-Adapter-FaceID Portrait Model (SD1.5) [ipadapter]", |
|
"reference": "https://huggingface.co/h94/IP-Adapter-FaceID", |
|
"filename": "ip-adapter-faceid-portrait_sd15.bin", |
|
"url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-portrait_sd15.bin" |
|
}, |
|
{ |
|
"name": "ip-adapter-faceid-plus_sd15.bin", |
|
"type": "IP-Adapter", |
|
"base": "SD1.5", |
|
"save_path": "ipadapter", |
|
"description": "IP-Adapter-FaceID Plus Model (SD1.5) [ipadapter]", |
|
"reference": "https://huggingface.co/h94/IP-Adapter-FaceID", |
|
"filename": "ip-adapter-faceid-plus_sd15.bin", |
|
"url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid-plus_sd15.bin" |
|
}, |
|
{ |
|
"name": "ip-adapter-faceid_sd15.bin", |
|
"type": "IP-Adapter", |
|
"base": "SD1.5", |
|
"save_path": "ipadapter", |
|
"description": "IP-Adapter-FaceID Model (SD1.5)", |
|
"reference": "https://huggingface.co/h94/IP-Adapter-FaceID", |
|
"filename": "ip-adapter-faceid_sd15.bin", |
|
"url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sd15.bin" |
|
}, |
|
{ |
|
"name": "ip-adapter-faceid_sd15_lora.safetensors", |
|
"type": "lora", |
|
"base": "SD1.5", |
|
"save_path": "loras/ipadapter", |
|
"description": "IP-Adapter-FaceID LoRA Model (SD1.5)", |
|
"reference": "https://huggingface.co/h94/IP-Adapter-FaceID", |
|
"filename": "ip-adapter-faceid_sd15_lora.safetensors", |
|
"url": "https://huggingface.co/h94/IP-Adapter-FaceID/resolve/main/ip-adapter-faceid_sd15_lora.safetensors" |
|
}, |
|
|
|
{ |
|
"name": "LongAnimatediff/lt_long_mm_16_64_frames_v1.1.ckpt (ComfyUI-AnimateDiff-Evolved) (Updated path)", |
|
"type": "animatediff", |
|
"base": "SD1.x", |
|
"save_path": "animatediff_models", |
|
"description": "Pressing 'install' directly downloads the model from the Kosinkadink/ComfyUI-AnimateDiff-Evolved extension node.", |
|
"reference": "https://huggingface.co/Lightricks/LongAnimateDiff", |
|
"filename": "lt_long_mm_16_64_frames_v1.1.ckpt", |
|
"url": "https://huggingface.co/Lightricks/LongAnimateDiff/resolve/main/lt_long_mm_16_64_frames_v1.1.ckpt" |
|
}, |
|
|
|
{ |
|
"name": "animatediff/v3_sd15_sparsectrl_rgb.ckpt (ComfyUI-AnimateDiff-Evolved)", |
|
"type": "controlnet", |
|
"base": "SD1.x", |
|
"save_path": "controlnet/SD1.5/animatediff", |
|
"description": "AnimateDiff SparseCtrl RGB ControlNet model", |
|
"reference": "https://huggingface.co/guoyww/animatediff", |
|
"filename": "v3_sd15_sparsectrl_rgb.ckpt", |
|
"url": "https://huggingface.co/guoyww/animatediff/resolve/main/v3_sd15_sparsectrl_rgb.ckpt" |
|
}, |
|
{ |
|
"name": "animatediff/v3_sd15_sparsectrl_scribble.ckpt", |
|
"type": "controlnet", |
|
"base": "SD1.x", |
|
"save_path": "controlnet/SD1.5/animatediff", |
|
"description": "AnimateDiff SparseCtrl Scribble ControlNet model", |
|
"reference": "https://huggingface.co/guoyww/animatediff", |
|
"filename": "v3_sd15_sparsectrl_scribble.ckpt", |
|
"url": "https://huggingface.co/guoyww/animatediff/resolve/main/v3_sd15_sparsectrl_scribble.ckpt" |
|
}, |
|
{ |
|
"name": "animatediff/v3_sd15_mm.ckpt (ComfyUI-AnimateDiff-Evolved)", |
|
"type": "animatediff", |
|
"base": "SD1.x", |
|
"save_path": "custom_nodes/ComfyUI-AnimateDiff-Evolved/models", |
|
"description": "Pressing 'install' directly downloads the model from the Kosinkadink/ComfyUI-AnimateDiff-Evolved extension node. (Note: Requires ComfyUI-Manager V0.24 or above)", |
|
"reference": "https://huggingface.co/guoyww/animatediff", |
|
"filename": "v3_sd15_mm.ckpt", |
|
"url": "https://huggingface.co/guoyww/animatediff/resolve/main/v3_sd15_mm.ckpt" |
|
}, |
|
{ |
|
"name": "animatediff/v3_sd15_adapter.ckpt", |
|
"type": "lora", |
|
"base": "SD1.x", |
|
"save_path": "loras/SD1.5/animatediff", |
|
"description": "AnimateDiff Adapter LoRA (SD1.5)", |
|
"reference": "https://huggingface.co/guoyww/animatediff", |
|
"filename": "v3_sd15_adapter.ckpt", |
|
"url": "https://huggingface.co/guoyww/animatediff/resolve/main/v3_sd15_adapter.ckpt" |
|
}, |
|
|
|
{ |
|
"name": "Segmind-Vega", |
|
"type": "checkpoints", |
|
"base": "segmind-vega", |
|
"save_path": "checkpoints/segmind-vega", |
|
"description": "The Segmind-Vega Model is a distilled version of the Stable Diffusion XL (SDXL), offering a remarkable 70% reduction in size and an impressive 100% speedup while retaining high-quality text-to-image generation capabilities.", |
|
"reference": "https://huggingface.co/segmind/Segmind-Vega", |
|
"filename": "segmind-vega.safetensors", |
|
"url": "https://huggingface.co/segmind/Segmind-Vega/resolve/main/segmind-vega.safetensors" |
|
}, |
|
{ |
|
"name": "Segmind-VegaRT - Latent Consistency Model (LCM) LoRA of Segmind-Vega", |
|
"type": "lora", |
|
"base": "segmind-vega", |
|
"save_path": "loras/segmind-vega", |
|
"description": "Segmind-VegaRT a distilled consistency adapter for Segmind-Vega that allows to reduce the number of inference steps to only between 2 - 8 steps.", |
|
"reference": "https://huggingface.co/segmind/Segmind-VegaRT", |
|
"filename": "pytorch_lora_weights.safetensors", |
|
"url": "https://huggingface.co/segmind/Segmind-VegaRT/resolve/main/pytorch_lora_weights.safetensors" |
|
}, |
|
|
|
{ |
|
"name": "stabilityai/Stable Zero123", |
|
"type": "zero123", |
|
"base": "zero123", |
|
"save_path": "checkpoints/zero123", |
|
"description": "Stable Zero123 is a model for view-conditioned image generation based on [a/Zero123](https://github.com/cvlab-columbia/zero123).", |
|
"reference": "https://huggingface.co/stabilityai/stable-zero123", |
|
"filename": "stable_zero123.ckpt", |
|
"url": "https://huggingface.co/stabilityai/stable-zero123/resolve/main/stable_zero123.ckpt" |
|
}, |
|
{ |
|
"name": "LongAnimatediff/lt_long_mm_32_frames.ckpt (ComfyUI-AnimateDiff-Evolved) (Updated path)", |
|
"type": "animatediff", |
|
"base": "SD1.x", |
|
"save_path": "animatediff_models", |
|
"description": "Pressing 'install' directly downloads the model from the Kosinkadink/ComfyUI-AnimateDiff-Evolved extension node.", |
|
"reference": "https://huggingface.co/Lightricks/LongAnimateDiff", |
|
"filename": "lt_long_mm_32_frames.ckpt", |
|
"url": "https://huggingface.co/Lightricks/LongAnimateDiff/resolve/main/lt_long_mm_32_frames.ckpt" |
|
}, |
|
{ |
|
"name": "LongAnimatediff/lt_long_mm_16_64_frames.ckpt (ComfyUI-AnimateDiff-Evolved) (Updated path)", |
|
"type": "animatediff", |
|
"base": "SD1.x", |
|
"save_path": "animatediff_models", |
|
"description": "Pressing 'install' directly downloads the model from the Kosinkadink/ComfyUI-AnimateDiff-Evolved extension node.", |
|
"reference": "https://huggingface.co/Lightricks/LongAnimateDiff", |
|
"filename": "lt_long_mm_16_64_frames.ckpt", |
|
"url": "https://huggingface.co/Lightricks/LongAnimateDiff/resolve/main/lt_long_mm_16_64_frames.ckpt" |
|
}, |
|
{ |
|
"name": "ip-adapter_sd15.safetensors", |
|
"type": "IP-Adapter", |
|
"base": "SD1.5", |
|
"save_path": "ipadapter", |
|
"description": "You can use this model in the [a/ComfyUI IPAdapter plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) extension.", |
|
"reference": "https://huggingface.co/h94/IP-Adapter", |
|
"filename": "ip-adapter_sd15.safetensors", |
|
"url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15.safetensors" |
|
}, |
|
{ |
|
"name": "ip-adapter_sd15_light.safetensors", |
|
"type": "IP-Adapter", |
|
"base": "SD1.5", |
|
"save_path": "ipadapter", |
|
"description": "You can use this model in the [a/ComfyUI IPAdapter plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) extension.", |
|
"reference": "https://huggingface.co/h94/IP-Adapter", |
|
"filename": "ip-adapter_sd15_light.safetensors", |
|
"url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15_light.safetensors" |
|
}, |
|
{ |
|
"name": "ip-adapter_sd15_vit-G.safetensors", |
|
"type": "IP-Adapter", |
|
"base": "SD1.5", |
|
"save_path": "ipadapter", |
|
"description": "You can use this model in the [a/ComfyUI IPAdapter plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) extension.", |
|
"reference": "https://huggingface.co/h94/IP-Adapter", |
|
"filename": "ip-adapter_sd15_vit-G.safetensors", |
|
"url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter_sd15_vit-G.safetensors" |
|
}, |
|
{ |
|
"name": "ip-adapter-plus_sd15.safetensors", |
|
"type": "IP-Adapter", |
|
"base": "SD1.5", |
|
"save_path": "ipadapter", |
|
"description": "You can use this model in the [a/ComfyUI IPAdapter plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) extension.", |
|
"reference": "https://huggingface.co/h94/IP-Adapter", |
|
"filename": "ip-adapter-plus_sd15.safetensors", |
|
"url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-plus_sd15.safetensors" |
|
}, |
|
{ |
|
"name": "ip-adapter-plus-face_sd15.safetensors", |
|
"type": "IP-Adapter", |
|
"base": "SD1.5", |
|
"save_path": "ipadapter", |
|
"description": "You can use this model in the [a/ComfyUI IPAdapter plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) extension.", |
|
"reference": "https://huggingface.co/h94/IP-Adapter", |
|
"filename": "ip-adapter-plus-face_sd15.safetensors", |
|
"url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-plus-face_sd15.safetensors" |
|
}, |
|
{ |
|
"name": "ip-adapter-full-face_sd15.safetensors", |
|
"type": "IP-Adapter", |
|
"base": "SD1.5", |
|
"save_path": "ipadapter", |
|
"description": "You can use this model in the [a/ComfyUI IPAdapter plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) extension.", |
|
"reference": "https://huggingface.co/h94/IP-Adapter", |
|
"filename": "ip-adapter-full-face_sd15.safetensors", |
|
"url": "https://huggingface.co/h94/IP-Adapter/resolve/main/models/ip-adapter-full-face_sd15.safetensors" |
|
}, |
|
{ |
|
"name": "ip-adapter_sdxl.safetensors", |
|
"type": "IP-Adapter", |
|
"base": "SDXL", |
|
"save_path": "ipadapter", |
|
"description": "You can use this model in the [a/ComfyUI IPAdapter plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus) extension.", |
|
"reference": "https://huggingface.co/h94/IP-Adapter", |
|
"filename": "ip-adapter_sdxl.safetensors", |
|
"url": "https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter_sdxl.safetensors" |
|
}, |
|
{ |
|
"name": "ip-adapter_sdxl_vit-h.safetensors", |
|
"type": "IP-Adapter", |
|
"base": "SDXL", |
|
"save_path": "ipadapter", |
|
"description": "This model requires the use of the SD1.5 encoder despite being for SDXL checkpoints", |
|
"reference": "https://huggingface.co/h94/IP-Adapter", |
|
"filename": "ip-adapter_sdxl_vit-h.safetensors", |
|
"url": "https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter_sdxl_vit-h.safetensors" |
|
} |
|
] |
|
}