author     pieresimakp <69743585+pieresimakp@users.noreply.github.com>   2023-03-25 23:00:45 +0800
committer  pieresimakp <69743585+pieresimakp@users.noreply.github.com>   2023-03-25 23:00:45 +0800
commit     e3b9d0e3e8adfb6214a1eb7acf450574f427ff9d (patch)
tree       c9c64ad1f926df990fb2ce05c6eec063de195eec /modules/sd_hijack_unet.py
parent     771ea212de13711b494b082d8e94e79b17ac9d08 (diff)
parent     91ae48fd7e20c60d6374f340cac0939f56d87048 (diff)
Merge branch 'master' into img2img-detect-image-size
Diffstat (limited to 'modules/sd_hijack_unet.py')
-rw-r--r--   modules/sd_hijack_unet.py | 2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_hijack_unet.py b/modules/sd_hijack_unet.py
index 843ab66c..15858263 100644
--- a/modules/sd_hijack_unet.py
+++ b/modules/sd_hijack_unet.py
@@ -67,7 +67,7 @@ def hijack_ddpm_edit():
 unet_needs_upcast = lambda *args, **kwargs: devices.unet_needs_upcast
 CondFunc('ldm.models.diffusion.ddpm.LatentDiffusion.apply_model', apply_model, unet_needs_upcast)
 CondFunc('ldm.modules.diffusionmodules.openaimodel.timestep_embedding', lambda orig_func, timesteps, *args, **kwargs: orig_func(timesteps, *args, **kwargs).to(torch.float32 if timesteps.dtype == torch.int64 else devices.dtype_unet), unet_needs_upcast)
-if version.parse(torch.__version__) <= version.parse("1.13.1"):
+if version.parse(torch.__version__) <= version.parse("1.13.2") or torch.cuda.is_available():
     CondFunc('ldm.modules.diffusionmodules.util.GroupNorm32.forward', lambda orig_func, self, *args, **kwargs: orig_func(self.float(), *args, **kwargs), unet_needs_upcast)
     CondFunc('ldm.modules.attention.GEGLU.forward', lambda orig_func, self, x: orig_func(self.float(), x.float()).to(devices.dtype_unet), unet_needs_upcast)
     CondFunc('open_clip.transformer.ResidualAttentionBlock.__init__', lambda orig_func, *args, **kwargs: kwargs.update({'act_layer': GELUHijack}) and False or orig_func(*args, **kwargs), lambda _, *args, **kwargs: kwargs.get('act_layer') is None or kwargs['act_layer'] == torch.nn.GELU)
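
For context when reading this hunk outside the repository: CondFunc (defined in modules/sd_hijack_utils.py in this codebase) patches the callable at a dotted path so that a substitute implementation runs only while a predicate such as unet_needs_upcast holds. The change merged here from master widens the condition guarding the upcast hijacks that follow it in the hunk, from torch <= 1.13.1 to torch <= 1.13.2 or any CUDA-enabled build. The sketch below is a simplified, hypothetical re-creation of that pattern (patch_if and apply_upcast_hijacks are illustrative names, not the repository's API), under the assumption that the target is a plain module-level function; the real CondFunc also handles class attributes.

# Simplified sketch of conditional monkey-patching in the spirit of CondFunc.
# Hypothetical names; not the repository's actual implementation.
import importlib

import torch
from packaging import version


def patch_if(dotted_path, sub_func, cond_func):
    """Wrap the module-level callable at `dotted_path` so that
    `sub_func(orig, *args, **kwargs)` runs when `cond_func(orig, *args, **kwargs)`
    is truthy, and the original callable runs otherwise."""
    module_name, attr = dotted_path.rsplit('.', 1)
    module = importlib.import_module(module_name)
    orig = getattr(module, attr)

    def wrapper(*args, **kwargs):
        if cond_func(orig, *args, **kwargs):
            return sub_func(orig, *args, **kwargs)
        return orig(*args, **kwargs)

    setattr(module, attr, wrapper)
    return wrapper


# The gate this merge brings in from master: the float32 upcast hijacks are
# installed for torch <= 1.13.2 or whenever CUDA is available, instead of
# only for torch <= 1.13.1.
apply_upcast_hijacks = (
    version.parse(torch.__version__) <= version.parse("1.13.2")
    or torch.cuda.is_available()
)

Note that installing the patch and deciding whether to upcast are separate steps: the hijacks are registered once at import time, but the predicate (unet_needs_upcast in the real code) is evaluated on every call, so behaviour only changes when upcasting is actually enabled.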