author    AUTOMATIC <16777216c@gmail.com>  2023-05-10 08:05:02 +0000
committer AUTOMATIC <16777216c@gmail.com>  2023-05-10 08:05:02 +0000
commit    028d3f6425d85f122027c127fba8bcbf4f66ee75 (patch)
tree      05e714f118aaa4d25920918807e0b83344133629 /modules/sd_hijack.py
parent    e42de4b8a2356c6d286adb07292442d75e5595d3 (diff)
ruff auto fixes
Diffstat (limited to 'modules/sd_hijack.py')
 modules/sd_hijack.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 81573b78..e374aeb8 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -37,7 +37,7 @@ def apply_optimizations():
     optimization_method = None
-    can_use_sdp = hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(getattr(torch.nn.functional, "scaled_dot_product_attention")) # not everyone has torch 2.x to use sdp
+    can_use_sdp = hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(torch.nn.functional.scaled_dot_product_attention) # not everyone has torch 2.x to use sdp
     if cmd_opts.force_enable_xformers or (cmd_opts.xformers and shared.xformers_available and torch.version.cuda and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0)):
         print("Applying xformers cross attention optimization.")
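Context for the one-line change above (not part of the commit itself): once hasattr() has confirmed an attribute exists, wrapping it in getattr() with a constant name adds nothing over direct attribute access, which is consistent with ruff's B009 (getattr-with-constant) auto-fix. A minimal standalone sketch of the same feature-detection pattern, assuming only that some version of torch is installed:

import torch

# scaled_dot_product_attention only ships with torch 2.x, so the webui probes
# for it at runtime rather than requiring a specific torch version.
can_use_sdp = hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(
    # direct access is safe here: hasattr() above already proved the attribute exists
    torch.nn.functional.scaled_dot_product_attention
)

if can_use_sdp:
    print("torch 2.x SDP attention is available")
else:
    print("SDP attention unavailable; an older attention path would be used")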