about summary refs log tree commit diff
path: root/modules
diff options
context:
space:
mode:
authorAUTOMATIC1111 <16777216c@gmail.com>2023-07-26 13:03:52 +0300
committerAUTOMATIC1111 <16777216c@gmail.com>2023-07-26 13:04:10 +0300
commit5c8f91b22975701af22d24f947af82e7d23264d5 (patch)
tree719bc68244d850845760b1cb22dc27991fc76de7 /modules
parent6b877c35da12029106062bfa8dfefb6a565f13ff (diff)
fix autograd which i broke for no good reason when implementing SDXL
Diffstat (limited to 'modules')
-rw-r--r--modules/sd_hijack_clip.py4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/modules/sd_hijack_clip.py b/modules/sd_hijack_clip.py
index 5443e609..990533fe 100644
--- a/modules/sd_hijack_clip.py
+++ b/modules/sd_hijack_clip.py
@@ -273,9 +273,9 @@ class FrozenCLIPEmbedderWithCustomWordsBase(torch.nn.Module):
# restoring original mean is likely not correct, but it seems to work well to prevent artifacts that happen otherwise
batch_multipliers = torch.asarray(batch_multipliers).to(devices.device)
original_mean = z.mean()
- z *= batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
+ z = z * batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
new_mean = z.mean()
- z *= (original_mean / new_mean)
+ z = z * (original_mean / new_mean)
return z