Skip to content

Commit cb96d4d

Browse files
Disable workaround on newer cudnn. (#10807)
1 parent 394348f commit cb96d4d

File tree

1 file changed

+2
-1
lines changed

1 file changed

+2
-1
lines changed

comfy/ops.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,8 @@ def scaled_dot_product_attention(q, k, v, *args, **kwargs):
5858
NVIDIA_MEMORY_CONV_BUG_WORKAROUND = False
5959
try:
6060
if comfy.model_management.is_nvidia():
61-
if torch.backends.cudnn.version() >= 91002 and comfy.model_management.torch_version_numeric >= (2, 9) and comfy.model_management.torch_version_numeric <= (2, 10):
61+
cudnn_version = torch.backends.cudnn.version()
62+
if (cudnn_version >= 91002 and cudnn_version < 91500) and comfy.model_management.torch_version_numeric >= (2, 9) and comfy.model_management.torch_version_numeric <= (2, 10):
6263
#TODO: change upper bound version once it's fixed
6364
NVIDIA_MEMORY_CONV_BUG_WORKAROUND = True
6465
logging.info("working around nvidia conv3d memory bug.")

0 commit comments

Comments
 (0)