author:    CodeHatchling <steve@codehatch.com>  2023-12-03 04:14:02 +0000
committer: CodeHatchling <steve@codehatch.com>  2023-12-03 04:14:02 +0000
commit:    3bd3a091604a332de6ff249870dabd2a91215499
tree:      0323625627748ee44fc192bb2496585a4db56b5a /modules/sub_quadratic_attention.py
parent:    bb04d400c95df01d191ef6c1a43e66b95425fa33
parent:    f0f100e67b78f686dc73cf3c8cad422e45cc9b8a
Merge remote-tracking branch 'origin/dev' into soft-inpainting
# Conflicts:
# modules/processing.py
Diffstat (limited to 'modules/sub_quadratic_attention.py')
-rw-r--r-- | modules/sub_quadratic_attention.py | 4 |
1 file changed, 2 insertions, 2 deletions
diff --git a/modules/sub_quadratic_attention.py b/modules/sub_quadratic_attention.py
index ae4ee4bb..4cb561ef 100644
--- a/modules/sub_quadratic_attention.py
+++ b/modules/sub_quadratic_attention.py
@@ -15,7 +15,7 @@ import torch
 from torch import Tensor
 from torch.utils.checkpoint import checkpoint
 import math
-from typing import Optional, NamedTuple, List
+from typing import Optional, NamedTuple


 def narrow_trunc(
@@ -97,7 +97,7 @@ def _query_chunk_attention(
         )
         return summarize_chunk(query, key_chunk, value_chunk)

-    chunks: List[AttnChunk] = [
+    chunks: list[AttnChunk] = [
         chunk_scanner(chunk) for chunk in torch.arange(0, k_tokens, kv_chunk_size)
     ]
     acc_chunk = AttnChunk(*map(torch.stack, zip(*chunks)))
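The substantive change is dropping `typing.List` in favour of the builtin `list` generic (PEP 585, Python 3.9+). Below is a minimal, self-contained sketch of that pattern and of the `zip`/`torch.stack` accumulation visible in the second hunk; `Chunk` and `stack_chunks` are hypothetical stand-ins for illustration, not the module's actual code.

```python
# Sketch only: illustrates the diff's typing change, not webui's actual code.
from typing import NamedTuple

import torch
from torch import Tensor


class Chunk(NamedTuple):  # hypothetical stand-in for the module's AttnChunk
    values: Tensor
    weights: Tensor


# PEP 585 (Python 3.9+): the builtin `list` works as a generic annotation,
# so `from typing import List` can be removed from the imports.
def stack_chunks(chunks: list[Chunk]) -> Chunk:
    # Same accumulation pattern as the hunk: stack each field across chunks.
    return Chunk(*map(torch.stack, zip(*chunks)))


if __name__ == "__main__":
    parts = [Chunk(torch.zeros(2, 3), torch.ones(2, 3)) for _ in range(4)]
    stacked = stack_chunks(parts)
    print(stacked.values.shape)   # torch.Size([4, 2, 3])
    print(stacked.weights.shape)  # torch.Size([4, 2, 3])
```

The annotation only matters to type checkers; runtime behaviour is identical to the `typing.List` form, which is why the diff is a pure cleanup with no functional change.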