about summary refs log tree commit diff
path: root/modules
diff options
context:
space:
mode:
authorKohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>2024-01-09 23:15:05 +0800
committerKohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>2024-01-09 23:15:05 +0800
commit1fd69655fe340325863cbd7bf5297e034a6a3a0a (patch)
treedf5b8acde1ec1510494c3df1567fad76426c3482 /modules
parente00365962b17550a42235d1fbe2ad2c7cc4b8961 (diff)
Revert "Apply correct inference precision implementation"
This reverts commit e00365962b17550a42235d1fbe2ad2c7cc4b8961.
Diffstat (limited to 'modules')
-rw-r--r--modules/devices.py42
1 file changed, 9 insertions, 33 deletions
diff --git a/modules/devices.py b/modules/devices.py
index 9e1f207c..ad36f656 100644
--- a/modules/devices.py
+++ b/modules/devices.py
@@ -132,21 +132,6 @@ patch_module_list = [
]
-def cast_output(result):
- if isinstance(result, tuple):
- result = tuple(i.to(dtype_inference) if isinstance(i, torch.Tensor) else i for i in result)
- elif isinstance(result, torch.Tensor):
- result = result.to(dtype_inference)
- return result
-
-
-def autocast_with_cast_output(self, *args, **kwargs):
- result = self.org_forward(*args, **kwargs)
- if dtype_inference != dtype:
- result = cast_output(result)
- return result
-
-
def manual_cast_forward(target_dtype):
def forward_wrapper(self, *args, **kwargs):
if any(
@@ -164,7 +149,15 @@ def manual_cast_forward(target_dtype):
self.to(org_dtype)
if target_dtype != dtype_inference:
- result = cast_output(result)
+ if isinstance(result, tuple):
+ result = tuple(
+ i.to(dtype_inference)
+ if isinstance(i, torch.Tensor)
+ else i
+ for i in result
+ )
+ elif isinstance(result, torch.Tensor):
+ result = result.to(dtype_inference)
return result
return forward_wrapper
@@ -185,20 +178,6 @@ def manual_cast(target_dtype):
module_type.forward = module_type.org_forward
-@contextlib.contextmanager
-def precision_full_with_autocast(autocast_ctx):
- for module_type in patch_module_list:
- org_forward = module_type.forward
- module_type.forward = autocast_with_cast_output
- module_type.org_forward = org_forward
- try:
- with autocast_ctx:
- yield None
- finally:
- for module_type in patch_module_list:
- module_type.forward = module_type.org_forward
-
-
def autocast(disable=False):
if disable:
return contextlib.nullcontext()
@@ -212,9 +191,6 @@ def autocast(disable=False):
if has_xpu() or has_mps() or cuda_no_autocast():
return manual_cast(dtype_inference)
- if dtype_inference == torch.float32 and dtype != torch.float32:
- return precision_full_with_autocast(torch.autocast("cuda"))
-
return torch.autocast("cuda")