author    brkirch <brkirch@users.noreply.github.com>    2023-01-07 07:04:59 -0500
committer brkirch <brkirch@users.noreply.github.com>    2023-01-07 07:04:59 -0500
commit    df3b31eb559ab9fabf7e513bdeddd5282c16f124 (patch)
tree      a58f630c89c6ff95dab50fa0adeac22bfeb6fe32 /modules
parent    151233399c4b79934bdbb7c12a97eeb6499572fb (diff)
In-place operations can break gradient calculation
Diffstat (limited to 'modules')
-rw-r--r--  modules/sd_hijack_clip.py  4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/modules/sd_hijack_clip.py b/modules/sd_hijack_clip.py
index 5520c9b2..852afc66 100644
--- a/modules/sd_hijack_clip.py
+++ b/modules/sd_hijack_clip.py
@@ -247,9 +247,9 @@ class FrozenCLIPEmbedderWithCustomWordsBase(torch.nn.Module):
# restoring original mean is likely not correct, but it seems to work well to prevent artifacts that happen otherwise
batch_multipliers = torch.asarray(batch_multipliers).to(devices.device)
original_mean = z.mean()
- z *= batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
+ z = z * batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
new_mean = z.mean()
- z *= original_mean / new_mean
+ z = z * (original_mean / new_mean)
return z
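
Why the change matters (a minimal sketch, not part of this commit): PyTorch's autograd saves intermediate tensors for the backward pass, and mutating one of them in place invalidates the saved value, raising a RuntimeError when gradients are computed. The standalone example below uses made-up tensors purely to illustrate that failure mode and the out-of-place fix the patch adopts.

import torch

# Hypothetical illustration of the failure mode fixed above: autograd saves
# exp's output to compute its gradient, so overwriting that output in place
# breaks the backward pass.
w = torch.ones(3, requires_grad=True)
z = torch.exp(w)          # autograd saves z to backprop through exp

try:
    z *= 2.0              # in-place: bumps z's version counter
    z.sum().backward()    # exp's backward detects the modified tensor
except RuntimeError as err:
    print("in-place scaling failed:", err)

# Out-of-place multiplication allocates a new tensor and leaves the saved
# value untouched, which is the pattern the patch switches to.
w = torch.ones(3, requires_grad=True)
z = torch.exp(w)
z = z * 2.0
z.sum().backward()        # succeeds; w.grad == 2 * exp(w)
print(w.grad)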