From 2aed53c4ac78d842a2e984d23343334a29ed8562 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Tue, 30 Apr 2024 21:23:40 -0400
Subject: [PATCH] Workaround xformers bug.

---
 comfy/ldm/modules/attention.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/comfy/ldm/modules/attention.py b/comfy/ldm/modules/attention.py
index f116efee..d51a2fae 100644
--- a/comfy/ldm/modules/attention.py
+++ b/comfy/ldm/modules/attention.py
@@ -297,8 +297,8 @@ def attention_split(q, k, v, heads, mask=None):
 BROKEN_XFORMERS = False
 try:
     x_vers = xformers.__version__
-    #I think 0.0.23 is also broken (q with bs bigger than 65535 gives CUDA error)
-    BROKEN_XFORMERS = x_vers.startswith("0.0.21") or x_vers.startswith("0.0.22") or x_vers.startswith("0.0.23")
+    # Bug confirmed on 0.0.21 through 0.0.26 (q with batch size larger than 65535 gives a CUDA error); flag every 0.0.2x release except 0.0.20 to be safe
+    BROKEN_XFORMERS = x_vers.startswith("0.0.2") and not x_vers.startswith("0.0.20")
 except:
     pass

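
For context (not part of the commit): below is a minimal sketch of how a BROKEN_XFORMERS flag like the one above is typically consumed, using the 65535 batch limit named in the patch comment. The helper names (attention_xformers, attention_pytorch) and the exact fallback logic are assumptions for illustration; the real dispatch code in comfy/ldm/modules/attention.py may differ.

# Sketch only; function names and fallback logic are assumptions, not taken from this commit.
import torch

try:
    import xformers
    import xformers.ops
    XFORMERS_AVAILABLE = True
except ImportError:
    XFORMERS_AVAILABLE = False

BROKEN_XFORMERS = False
if XFORMERS_AVAILABLE:
    try:
        x_vers = xformers.__version__
        # Same check as the patch: every 0.0.2x release except 0.0.20.
        BROKEN_XFORMERS = x_vers.startswith("0.0.2") and not x_vers.startswith("0.0.20")
    except Exception:
        pass

def attention_pytorch(q, k, v):
    # Plain PyTorch fallback; q, k, v shaped (batch * heads, seq, dim_head).
    return torch.nn.functional.scaled_dot_product_attention(q, k, v)

def attention_xformers(q, k, v):
    # Work around the xformers bug: on affected versions, a first dimension
    # larger than 65535 triggers a CUDA error, so route those batches to the
    # PyTorch path instead.
    if BROKEN_XFORMERS and q.shape[0] > 65535:
        return attention_pytorch(q, k, v)
    return xformers.ops.memory_efficient_attention(q, k, v)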