author    FNSpd <125805478+FNSpd@users.noreply.github.com>  2023-03-24 16:29:16 +0400
committer GitHub <noreply@github.com>                       2023-03-24 16:29:16 +0400
commit    280ed8f00fde0ece026339acdd42888ac4dc3167 (patch)
tree      826ab8dd91889da496a6d60ad8566e551d082d89
parent    beb7dda5d6d5baa1570721fd7ca18e236fa02521 (diff)
Update sd_hijack_optimizations.py
-rw-r--r--  modules/sd_hijack_optimizations.py  |  2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index eaff12f0..372555ff 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -372,7 +372,7 @@ def scaled_dot_product_attention_forward(self, x, context=None, mask=None):
     dtype = q.dtype
     if shared.opts.upcast_attn:
-        q, k = q.float(), k.float()
+        q, k, v = q.float(), k.float(), v.float()
 
     # the output of sdp = (batch, num_heads, seq_len, head_dim)
     hidden_states = torch.nn.functional.scaled_dot_product_attention(
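The change upcasts v together with q and k when upcast_attn is enabled, because torch.nn.functional.scaled_dot_product_attention expects query, key, and value to share a dtype; upcasting only q and k while v stays in half precision fails at the attention call. The following is a minimal standalone sketch (not code from this repository; tensor shapes are illustrative) showing the failure before the fix and the working call after it:

# Minimal sketch: why v must be upcast along with q and k.
import torch

# Illustrative (batch, num_heads, seq_len, head_dim) tensors in half precision.
q = torch.randn(1, 8, 77, 64, dtype=torch.float16)
k = torch.randn(1, 8, 77, 64, dtype=torch.float16)
v = torch.randn(1, 8, 77, 64, dtype=torch.float16)

# Before the fix: only q and k are upcast, leaving v in float16.
q_f, k_f = q.float(), k.float()
try:
    torch.nn.functional.scaled_dot_product_attention(q_f, k_f, v)
except RuntimeError as e:
    print("dtype mismatch:", e)

# After the fix: all three tensors are upcast together, so SDPA runs in float32.
q_f, k_f, v_f = q.float(), k.float(), v.float()
out = torch.nn.functional.scaled_dot_product_attention(q_f, k_f, v_f)
print(out.dtype)  # torch.float32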