path: root/modules/sd_models_xl.py
author     AUTOMATIC1111 <16777216c@gmail.com>   2023-12-16 10:22:51 +0300
committer  GitHub <noreply@github.com>           2023-12-16 10:22:51 +0300
commit     c121f8c31587a21020e8670664977f6f76e68905 (patch)
tree       5db19664111d4264d4b018bee6557d7d6b0ec1c4 /modules/sd_models_xl.py
parent     60186c7b9d6034ff08f4fe9e213a495b5321302d (diff)
parent     8edb9144cc76b39f3d68c0407b3bb990809d1b03 (diff)
Merge pull request #14031 from AUTOMATIC1111/test-fp8
A big improvement to the dtype casting system, with an fp8 storage type and manual cast
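
For context, the "manual cast" mentioned in the title boils down to keeping module weights stored in a low-precision dtype (such as float8) and casting them up to the compute dtype only while an operation runs. The sketch below is an illustrative Python example of that idea, not the PR's actual implementation; the class name and the use of torch.float8_e4m3fn (available in recent PyTorch builds) are assumptions.

import torch
import torch.nn as nn
import torch.nn.functional as F

class Float8Linear(nn.Module):
    """Illustrative sketch: weights live in float8 to save memory and are
    upcast to the compute dtype only for the duration of the matmul."""
    def __init__(self, linear: nn.Linear, compute_dtype=torch.float16):
        super().__init__()
        self.compute_dtype = compute_dtype
        # fp8 storage copy (inference only, so no gradients are needed)
        self.register_buffer("weight", linear.weight.detach().to(torch.float8_e4m3fn))
        if linear.bias is not None:
            self.register_buffer("bias", linear.bias.detach().to(compute_dtype))
        else:
            self.bias = None

    def forward(self, x):
        # manual cast: upcast just in time, compute, and let the temporary copy be freed
        w = self.weight.to(self.compute_dtype)
        return F.linear(x.to(self.compute_dtype), w, self.bias)

Compared with storing the weights in float16, this roughly halves weight memory at the cost of one extra cast per forward call, which is the trade-off an fp8 storage option makes.
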
Diffstat (limited to 'modules/sd_models_xl.py')
-rw-r--r--   modules/sd_models_xl.py   2
1 file changed, 1 insertion, 1 deletion
diff --git a/modules/sd_models_xl.py b/modules/sd_models_xl.py
index 01123321..11259a36 100644
--- a/modules/sd_models_xl.py
+++ b/modules/sd_models_xl.py
@@ -93,7 +93,7 @@ def extend_sdxl(model):
model.parameterization = "v" if isinstance(model.denoiser.scaling, sgm.modules.diffusionmodules.denoiser_scaling.VScaling) else "eps"
discretization = sgm.modules.diffusionmodules.discretizer.LegacyDDPMDiscretization()
- model.alphas_cumprod = torch.asarray(discretization.alphas_cumprod, device=devices.device, dtype=dtype)
+ model.alphas_cumprod = torch.asarray(discretization.alphas_cumprod, device=devices.device, dtype=torch.float32)
model.conditioner.wrapped = torch.nn.Module()
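
The one-line change above pins model.alphas_cumprod to float32 rather than the model's working dtype, so the noise schedule keeps full precision even when weights are stored in fp16 or fp8. The snippet below is a small self-contained check of why that matters; it assumes the commonly quoted Stable Diffusion schedule parameters (1000 steps, scaled-linear betas from 0.00085 to 0.012) rather than values read from this repository.

import torch

# Scaled-linear beta schedule with the usual SD defaults (assumed values).
betas = torch.linspace(0.00085 ** 0.5, 0.012 ** 0.5, 1000, dtype=torch.float64) ** 2
alphas_cumprod = torch.cumprod(1.0 - betas, dim=0)

for dtype in (torch.float32, torch.float16):
    approx = alphas_cumprod.to(dtype).double()
    rel_err = ((approx - alphas_cumprod) / alphas_cumprod).abs().max().item()
    print(dtype, "max relative error:", rel_err)

# float16 carries only about 3 significant decimal digits, so the schedule
# values pick up a relative error around 5e-4, versus below 1e-7 for float32;
# any sigma or timestep math built on alphas_cumprod inherits that error.
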