From 0e3b64117218c50a554b492269f5f35779839695 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Fri, 1 Sep 2023 02:12:03 -0400
Subject: [PATCH] Remove xformers related print.

---
 comfy/ldm/modules/attention.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/comfy/ldm/modules/attention.py b/comfy/ldm/modules/attention.py
index 973619bf..9fdfbd21 100644
--- a/comfy/ldm/modules/attention.py
+++ b/comfy/ldm/modules/attention.py
@@ -402,8 +402,6 @@ class MemoryEfficientCrossAttention(nn.Module):
     # https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223
     def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.0, dtype=None, device=None, operations=comfy.ops):
         super().__init__()
-        print(f"Setting up {self.__class__.__name__}. Query dim is {query_dim}, context_dim is {context_dim} and using "
-              f"{heads} heads.")
         inner_dim = dim_head * heads
         context_dim = default(context_dim, query_dim)
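
Note: the sketch below is not part of the patch above. If the removed setup
message is still wanted for debugging, one common alternative is to emit it
through Python's standard logging module at DEBUG level, so it stays silent
unless logging is explicitly configured. This is a minimal sketch; the
`default` helper is assumed to behave like the one already defined in
comfy/ldm/modules/attention.py, and `comfy.ops` is the module referenced in
the constructor signature shown in the hunk.

import logging

import torch.nn as nn
import comfy.ops

logger = logging.getLogger(__name__)

def default(val, d):
    # Assumed to mirror the `default` helper used in attention.py:
    # return the value if provided, otherwise the fallback.
    return val if val is not None else d

class MemoryEfficientCrossAttention(nn.Module):
    def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64,
                 dropout=0.0, dtype=None, device=None, operations=comfy.ops):
        super().__init__()
        # Emits only when the application enables DEBUG logging,
        # instead of printing unconditionally on every instantiation.
        logger.debug(
            "Setting up %s. Query dim is %s, context_dim is %s and using %s heads.",
            self.__class__.__name__, query_dim, context_dim, heads,
        )
        inner_dim = dim_head * heads
        context_dim = default(context_dim, query_dim)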