diff options
author | Nuullll <vfirst218@gmail.com> | 2023-12-09 10:11:26 +0000 |
---|---|---|
committer | Nuullll <vfirst218@gmail.com> | 2023-12-09 10:11:26 +0000 |
commit | 049d5642e58d572ee8657ac754e72d019eea0e6c (patch) | |
tree | 559830dbe967472946402563c6061d8b65f03eff /modules/xpu_specific.py | |
parent | 59429793440fb3cb1624ddcc702c6f9807373203 (diff) | |
download | stable-diffusion-webui-gfx803-049d5642e58d572ee8657ac754e72d019eea0e6c.tar.gz stable-diffusion-webui-gfx803-049d5642e58d572ee8657ac754e72d019eea0e6c.tar.bz2 stable-diffusion-webui-gfx803-049d5642e58d572ee8657ac754e72d019eea0e6c.zip |
Fix format
Diffstat (limited to 'modules/xpu_specific.py')
-rw-r--r-- | modules/xpu_specific.py | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/modules/xpu_specific.py b/modules/xpu_specific.py index 9bb0a561..d8da94a0 100644 --- a/modules/xpu_specific.py +++ b/modules/xpu_specific.py @@ -56,4 +56,4 @@ if has_xpu: lambda orig_func, tensors, dim=0, out=None: not all(t.dtype == tensors[0].dtype for t in tensors)) CondFunc('torch.nn.functional.scaled_dot_product_attention', lambda orig_func, query, key, value, attn_mask=None, dropout_p=0.0, is_causal=False: orig_func(query, key.to(query.dtype), value.to(query.dtype), attn_mask, dropout_p, is_causal), - lambda orig_func, query, key, value, attn_mask=None, dropout_p=0.0, is_causal=False: query.dtype != key.dtype or query.dtype != value.dtype)
\ No newline at end of file + lambda orig_func, query, key, value, attn_mask=None, dropout_p=0.0, is_causal=False: query.dtype != key.dtype or query.dtype != value.dtype) |