diff options
author | FNSpd <125805478+FNSpd@users.noreply.github.com> | 2023-03-24 12:29:16 +0000 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-03-24 12:29:16 +0000 |
commit | 280ed8f00fde0ece026339acdd42888ac4dc3167 (patch) | |
tree | 826ab8dd91889da496a6d60ad8566e551d082d89 /modules/sd_hijack_optimizations.py | |
parent | beb7dda5d6d5baa1570721fd7ca18e236fa02521 (diff) | |
download | stable-diffusion-webui-gfx803-280ed8f00fde0ece026339acdd42888ac4dc3167.tar.gz stable-diffusion-webui-gfx803-280ed8f00fde0ece026339acdd42888ac4dc3167.tar.bz2 stable-diffusion-webui-gfx803-280ed8f00fde0ece026339acdd42888ac4dc3167.zip |
Update sd_hijack_optimizations.py
Diffstat (limited to 'modules/sd_hijack_optimizations.py')
-rw-r--r-- | modules/sd_hijack_optimizations.py | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py index eaff12f0..372555ff 100644 --- a/modules/sd_hijack_optimizations.py +++ b/modules/sd_hijack_optimizations.py @@ -372,7 +372,7 @@ def scaled_dot_product_attention_forward(self, x, context=None, mask=None): dtype = q.dtype
if shared.opts.upcast_attn:
- q, k = q.float(), k.float()
+ q, k, v = q.float(), k.float(), v.float()
# the output of sdp = (batch, num_heads, seq_len, head_dim)
hidden_states = torch.nn.functional.scaled_dot_product_attention(
|