Browse Source

Remove xformers-related print statement.

pull/1395/head
comfyanonymous 1 year ago
parent
commit
0e3b641172
  1. 2
      comfy/ldm/modules/attention.py

2
comfy/ldm/modules/attention.py

@@ -402,8 +402,6 @@ class MemoryEfficientCrossAttention(nn.Module):
# https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223
def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.0, dtype=None, device=None, operations=comfy.ops):
super().__init__()
print(f"Setting up {self.__class__.__name__}. Query dim is {query_dim}, context_dim is {context_dim} and using "
f"{heads} heads.")
inner_dim = dim_head * heads
context_dim = default(context_dim, query_dim)

Loading…
Cancel
Save