
Work around xformers bug.

pull/3392/head
comfyanonymous, 7 months ago
commit 2aed53c4ac

comfy/ldm/modules/attention.py (2 additions, 2 deletions)

@@ -297,8 +297,8 @@ def attention_split(q, k, v, heads, mask=None):
 BROKEN_XFORMERS = False
 try:
     x_vers = xformers.__version__
-    #I think 0.0.23 is also broken (q with bs bigger than 65535 gives CUDA error)
-    BROKEN_XFORMERS = x_vers.startswith("0.0.21") or x_vers.startswith("0.0.22") or x_vers.startswith("0.0.23")
+    # XFormers bug confirmed on all versions from 0.0.21 to 0.0.26 (q with bs bigger than 65535 gives CUDA error)
+    BROKEN_XFORMERS = x_vers.startswith("0.0.2") and not x_vers.startswith("0.0.20")
 except:
     pass
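
For context, here is a minimal sketch of how the BROKEN_XFORMERS flag could be consumed by a caller to dodge the CUDA error described in the comment. The helper name safe_attention and the PyTorch fallback are illustrative assumptions, not part of this commit; only the flag itself and the 65535 threshold come from the diff above.

import torch
import xformers
import xformers.ops

def safe_attention(q, k, v, mask=None):
    # Hypothetical helper (not from this commit). BROKEN_XFORMERS is the
    # module-level flag set in the hunk above; on a broken xformers release,
    # fall back to PyTorch attention when the batch dimension would exceed 65535.
    if BROKEN_XFORMERS and q.shape[0] > 65535:
        return torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=mask)
    # Otherwise use xformers' memory-efficient attention kernel.
    return xformers.ops.memory_efficient_attention(q, k, v, attn_bias=mask)

Note that the new check flags every version string starting with "0.0.2" except the 0.0.20 series, so 0.0.21 through 0.0.29 all take the workaround path, while 0.0.20 and later release series keep using xformers directly.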
