author    Vivek K. Vasishtha <vivekkumar.be01@gmail.com>    2023-06-03 21:54:27 +0530
committer GitHub <noreply@github.com>                       2023-06-03 21:54:27 +0530
commit    b1a72bc7e292246e70ec8ebebd3a9ca42dffff03 (patch)
tree      9dbb9a1742c24a5e32f456efa88974be7d77e1b3
parent    b6af0a3809ea869fb180633f9affcae4b199ffcf (diff)
torch.cuda.is_available() check for SdOptimizationXformers
-rw-r--r--  modules/sd_hijack_optimizations.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 80e48a42..c2660177 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -48,7 +48,7 @@ class SdOptimizationXformers(SdOptimization):
     priority = 100
 
     def is_available(self):
-        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.version.cuda and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))
+        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))
 
     def apply(self):
         ldm.modules.attention.CrossAttention.forward = xformers_attention_forward
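
For context: torch.version.cuda is a build-time property, set on any CUDA build of PyTorch even when the machine has no usable GPU, so the old check could fall through to torch.cuda.get_device_capability() and raise at runtime. torch.cuda.is_available() verifies a CUDA device is actually usable. A minimal standalone sketch of the corrected logic follows; xformers_usable and its force/xformers_available parameters are hypothetical stand-ins for shared.cmd_opts.force_enable_xformers and shared.xformers_available, which are not importable outside the webui.

import torch

def xformers_usable(force: bool = False, xformers_available: bool = True) -> bool:
    # Hypothetical stand-in for SdOptimizationXformers.is_available();
    # `force` and `xformers_available` replace the shared.* values used
    # in the actual webui code.
    if force:
        return True
    if not xformers_available:
        return False
    if not torch.cuda.is_available():
        # The old torch.version.cuda check passes on any CUDA *build* of
        # PyTorch, even with no GPU attached; get_device_capability()
        # would then raise instead of returning a capability tuple.
        return False
    # get_device_capability() returns (major, minor) for the current
    # device; xformers attention is gated to capabilities 6.0 through 9.0.
    return (6, 0) <= torch.cuda.get_device_capability() <= (9, 0)

print(xformers_usable())  # False on CPU-only machines instead of raising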