From a7375103b9c80bb7607f85faa4afbf11ab5a5685 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Wed, 17 May 2023 23:04:40 -0400
Subject: [PATCH] Some small changes to Load/SaveLatent.

---
 nodes.py | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/nodes.py b/nodes.py
index a2c7713a..7255621d 100644
--- a/nodes.py
+++ b/nodes.py
@@ -11,6 +11,7 @@ import time
 from PIL import Image
 from PIL.PngImagePlugin import PngInfo
 import numpy as np
+import safetensors.torch
 
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), "comfy"))
 
@@ -29,7 +30,6 @@ import importlib
 
 import folder_paths
 
-import safetensors.torch as sft
 
 def before_node_execution():
     comfy.model_management.throw_exception_if_processing_interrupted()
@@ -307,7 +307,10 @@ class SaveLatent:
         file = f"{filename}_{counter:05}_.latent"
         file = os.path.join(full_output_folder, file)
 
-        sft.save_file(samples, file, metadata=metadata)
+        output = {}
+        output["latent_tensor"] = samples["samples"]
+
+        safetensors.torch.save_file(output, file, metadata=metadata)
 
         return {}
 
@@ -328,9 +331,10 @@ class LoadLatent:
 
     def load(self, latent):
         file = folder_paths.get_annotated_filepath(latent, self.input_dir)
-        latent = sft.load_file(file, device="cpu")
+        latent = safetensors.torch.load_file(file, device="cpu")
+        samples = {"samples": latent["latent_tensor"]}
 
-        return (latent, )
+        return (samples, )
 
 
 class CheckpointLoader:
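
Note (not part of the patch): a minimal sketch of how a .latent file written by the
revised SaveLatent node could be inspected outside ComfyUI, assuming the safetensors
package is installed. The filename "example.latent" is a placeholder, and the shape
comment is an assumption about typical Stable Diffusion latents, not something the
patch guarantees.

    import safetensors.torch

    # The patch stores the latent under a single "latent_tensor" key.
    tensors = safetensors.torch.load_file("example.latent", device="cpu")
    latent = tensors["latent_tensor"]   # typically [batch, 4, height // 8, width // 8]

    # LoadLatent re-wraps it in the dict form downstream nodes expect.
    samples = {"samples": latent}
    print(latent.shape, latent.dtype)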