@@ -297,8 +297,8 @@ def attention_split(q, k, v, heads, mask=None):
 BROKEN_XFORMERS = False
 try:
     x_vers = xformers.__version__
-    #I think 0.0.23 is also broken (q with bs bigger than 65535 gives CUDA error)
-    BROKEN_XFORMERS = x_vers.startswith("0.0.21") or x_vers.startswith("0.0.22") or x_vers.startswith("0.0.23")
+    # XFormers bug confirmed on all versions from 0.0.21 to 0.0.26 (q with bs bigger than 65535 gives CUDA error)
+    BROKEN_XFORMERS = x_vers.startswith("0.0.2") and not x_vers.startswith("0.0.20")
 except:
     pass
 
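For context, here is a minimal sketch of how a flag like `BROKEN_XFORMERS` would typically be consumed downstream. The 65535 flattened-batch threshold comes from the comment in the diff, and `attention_split` is the fallback named in the hunk context; the tensor layout and the `xformers.ops.memory_efficient_attention` call are assumptions for illustration, not the repository's exact implementation.

```python
import xformers
import xformers.ops

def attention_xformers(q, k, v, heads, mask=None):
    # Assumed layout: q, k, v are (batch, seq_len, heads * dim_head).
    b, _, inner_dim = q.shape
    dim_head = inner_dim // heads

    # On the broken builds flagged above, xformers raises a CUDA error once
    # the flattened batch (batch * heads) exceeds 65535, so route those calls
    # to the split-attention fallback from the surrounding file.
    if BROKEN_XFORMERS and b * heads > 65535:
        return attention_split(q, k, v, heads, mask=mask)

    # Reshape to the 4D (batch, seq_len, heads, dim_head) layout that
    # memory_efficient_attention accepts for multi-head inputs.
    q, k, v = (t.reshape(b, -1, heads, dim_head) for t in (q, k, v))
    out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=mask)
    return out.reshape(b, -1, heads * dim_head)
```

The guard is cheap (a flag test and a shape comparison), so unaffected xformers versions pay essentially nothing for it while the broken 0.0.21 to 0.0.26 range degrades gracefully instead of crashing.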