path: root/modules/mac_specific.py
author     AUTOMATIC <16777216c@gmail.com>    2023-05-21 17:37:09 +0300
committer  AUTOMATIC <16777216c@gmail.com>    2023-05-21 17:37:09 +0300
commit     1f3182924ba8e70d0e0fc3ed270782f324376ba3 (patch)
tree       27a9e5167e5b981dfe56f5084ea8e1e8743f3fc0 /modules/mac_specific.py
parent     89f9faa63388756314e8a1d96cf86bf5e0663045 (diff)
parent     fdaf0147b6d2a5f599464bb7c65817ef5832eff1 (diff)
Merge branch 'dev' into release_candidate
Diffstat (limited to 'modules/mac_specific.py')
 modules/mac_specific.py (-rw-r--r--) | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/modules/mac_specific.py b/modules/mac_specific.py
index 40ce2101..d74c6b95 100644
--- a/modules/mac_specific.py
+++ b/modules/mac_specific.py
@@ -1,6 +1,5 @@
 import torch
 import platform
-from modules import paths
 from modules.sd_hijack_utils import CondFunc
 from packaging import version
 
@@ -43,7 +42,7 @@ if has_mps:
     # MPS workaround for https://github.com/pytorch/pytorch/issues/79383
     CondFunc('torch.Tensor.to', lambda orig_func, self, *args, **kwargs: orig_func(self.contiguous(), *args, **kwargs),
         lambda _, self, *args, **kwargs: self.device.type != 'mps' and (args and isinstance(args[0], torch.device) and args[0].type == 'mps' or isinstance(kwargs.get('device'), torch.device) and kwargs['device'].type == 'mps'))
-    # MPS workaround for https://github.com/pytorch/pytorch/issues/80800 
+    # MPS workaround for https://github.com/pytorch/pytorch/issues/80800
     CondFunc('torch.nn.functional.layer_norm', lambda orig_func, *args, **kwargs: orig_func(*([args[0].contiguous()] + list(args[1:])), **kwargs),
         lambda _, *args, **kwargs: args and isinstance(args[0], torch.Tensor) and args[0].device.type == 'mps')
     # MPS workaround for https://github.com/pytorch/pytorch/issues/90532
@@ -61,4 +60,4 @@ if has_mps:
     # MPS workaround for https://github.com/pytorch/pytorch/issues/92311
     if platform.processor() == 'i386':
        for funcName in ['torch.argmax', 'torch.Tensor.argmax']:
-            CondFunc(funcName, lambda _, input, *args, **kwargs: torch.max(input.float() if input.dtype == torch.int64 else input, *args, **kwargs)[1], lambda _, input, *args, **kwargs: input.device.type == 'mps')
\ No newline at end of file
+            CondFunc(funcName, lambda _, input, *args, **kwargs: torch.max(input.float() if input.dtype == torch.int64 else input, *args, **kwargs)[1], lambda _, input, *args, **kwargs: input.device.type == 'mps')
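
Every workaround in this hunk goes through CondFunc('dotted.path', sub_func, cond_func) from modules.sd_hijack_utils, which only swaps in the replacement behaviour when the condition lambda on the call's arguments holds (here: the tensor or target device is 'mps'). The snippet below is a minimal, hypothetical sketch of that conditional monkey-patch pattern, not the repository's actual CondFunc implementation; the name cond_patch and its path-resolution logic are illustrative assumptions based only on the call sites visible above.

    # Illustrative sketch of a CondFunc-style conditional monkey-patch (not the real helper).
    import importlib
    from functools import wraps

    def cond_patch(func_path, sub_func, cond_func):
        """Replace the callable at func_path (e.g. 'torch.nn.functional.layer_norm')
        with a wrapper that calls sub_func(orig, *args, **kwargs) only when
        cond_func(orig, *args, **kwargs) is true, and falls through otherwise."""
        parts = func_path.split('.')
        owner = importlib.import_module(parts[0])       # import the root module
        for attr in parts[1:-1]:                        # walk submodules/classes, e.g. nn -> functional
            owner = getattr(owner, attr)
        orig = getattr(owner, parts[-1])                # keep a handle on the unpatched callable

        @wraps(orig)
        def wrapper(*args, **kwargs):
            if cond_func(orig, *args, **kwargs):
                return sub_func(orig, *args, **kwargs)  # patched path (e.g. force .contiguous() on MPS)
            return orig(*args, **kwargs)                # every other call falls through untouched

        setattr(owner, parts[-1], wrapper)
        return wrapper

Keeping the predicate next to the patch is what lets these workarounds stay inert elsewhere: the wrapper is always installed, but calls that do not involve an 'mps' device go straight to the original function.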