| author | brkirch <brkirch@users.noreply.github.com> | 2023-01-06 21:42:47 +0000 |
|---|---|---|
| committer | brkirch <brkirch@users.noreply.github.com> | 2023-01-06 21:42:47 +0000 |
| commit | c18add68ef7d2de3617cbbaff864b0c74cfdf6c0 | |
| tree | 6388fd13dad83130907dd39859c069eb9e014416 (/modules/sd_hijack_optimizations.py) | |
| parent | 848605fb654a55ee6947335d7df6e13366606fad | |
Added license
Diffstat (limited to 'modules/sd_hijack_optimizations.py')
-rw-r--r-- | modules/sd_hijack_optimizations.py | 1 |
1 file changed, 1 insertion, 0 deletions
```diff
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index b416e9ac..cdc63ed7 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -216,6 +216,7 @@ def split_cross_attention_forward_invokeAI(self, x, context=None, mask=None):
 # Based on Birch-san's modified implementation of sub-quadratic attention from https://github.com/Birch-san/diffusers/pull/1
+# The sub_quad_attention_forward function is under the MIT License listed under Memory Efficient Attention in the Licenses section of the web UI interface
 def sub_quad_attention_forward(self, x, context=None, mask=None):
     assert mask is None, "attention-mask not currently implemented for SubQuadraticCrossAttnProcessor."
```
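For context on what is being licensed here: `sub_quad_attention_forward` implements Birch-san's sub-quadratic attention, which avoids materializing the full attention matrix by processing keys and values in chunks and merging the partial softmax results. Below is a minimal, hypothetical sketch of that chunking idea in PyTorch; the function name `chunked_attention`, the `key_chunk_size` parameter, and the merge strategy are illustrative assumptions, not the web UI's actual implementation.

```python
import torch

def chunked_attention(q, k, v, key_chunk_size=1024):
    # Illustrative sketch only (names are hypothetical, not the repo's API).
    # Keys/values are processed in chunks and the partial softmax results are
    # merged with a running log-sum-exp, so the full (n_q x n_k) score matrix
    # is never materialized.
    scale = q.shape[-1] ** -0.5
    out = None  # running, rescaled weighted sum of values
    lse = None  # running log-sum-exp of scores per query
    for start in range(0, k.shape[-2], key_chunk_size):
        k_chunk = k[..., start:start + key_chunk_size, :]
        v_chunk = v[..., start:start + key_chunk_size, :]
        scores = q @ k_chunk.transpose(-2, -1) * scale       # (..., n_q, chunk)
        chunk_lse = scores.logsumexp(dim=-1, keepdim=True)   # (..., n_q, 1)
        chunk_out = torch.softmax(scores, dim=-1) @ v_chunk  # (..., n_q, d)
        if out is None:
            out, lse = chunk_out, chunk_lse
        else:
            # Rescale both partial results onto the new common normalizer.
            new_lse = torch.logaddexp(lse, chunk_lse)
            out = out * (lse - new_lse).exp() + chunk_out * (chunk_lse - new_lse).exp()
            lse = new_lse
    return out

# Usage: matches ordinary softmax attention up to floating-point error.
q, k, v = (torch.randn(1, 8, 4096, 64) for _ in range(3))
ref = torch.softmax(q @ k.transpose(-2, -1) * q.shape[-1] ** -0.5, dim=-1) @ v
assert torch.allclose(chunked_attention(q, k, v), ref, atol=1e-5)
```

With this scheme, peak memory scales with `key_chunk_size` rather than the full key length; the actual implementation also chunks queries, among other refinements.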