| author | unknown <mcgpapu@gmail.com> | 2023-01-28 09:40:51 +0000 |
|---|---|---|
| committer | unknown <mcgpapu@gmail.com> | 2023-01-28 09:40:51 +0000 |
| commit | e79b7db4b47a33889551b9266ee3277879d4f560 (patch) | |
| tree | 1c1944204e58e254bfea22ae44edccdbb54e6b3c /modules/sd_hijack_checkpoint.py | |
| parent | b921a52071cf2a5e551c31a6073af6eaebbf7847 (diff) | |
| parent | e8a41df49fadd2cf9f23b1f02d75a4947bec5646 (diff) | |
Merge branch 'master' of github.com:AUTOMATIC1111/stable-diffusion-webui into gamepad
Diffstat (limited to 'modules/sd_hijack_checkpoint.py')
-rw-r--r-- | modules/sd_hijack_checkpoint.py | 38 |
1 file changed, 37 insertions, 1 deletion
```diff
diff --git a/modules/sd_hijack_checkpoint.py b/modules/sd_hijack_checkpoint.py
index 5712972f..2604d969 100644
--- a/modules/sd_hijack_checkpoint.py
+++ b/modules/sd_hijack_checkpoint.py
@@ -1,10 +1,46 @@
 from torch.utils.checkpoint import checkpoint
 
+import ldm.modules.attention
+import ldm.modules.diffusionmodules.openaimodel
+
+
 def BasicTransformerBlock_forward(self, x, context=None):
     return checkpoint(self._forward, x, context)
 
+
 def AttentionBlock_forward(self, x):
     return checkpoint(self._forward, x)
 
+
 def ResBlock_forward(self, x, emb):
-    return checkpoint(self._forward, x, emb)
\ No newline at end of file
+    return checkpoint(self._forward, x, emb)
+
+
+stored = []
+
+
+def add():
+    if len(stored) != 0:
+        return
+
+    stored.extend([
+        ldm.modules.attention.BasicTransformerBlock.forward,
+        ldm.modules.diffusionmodules.openaimodel.ResBlock.forward,
+        ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward
+    ])
+
+    ldm.modules.attention.BasicTransformerBlock.forward = BasicTransformerBlock_forward
+    ldm.modules.diffusionmodules.openaimodel.ResBlock.forward = ResBlock_forward
+    ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward = AttentionBlock_forward
+
+
+def remove():
+    if len(stored) == 0:
+        return
+
+    ldm.modules.attention.BasicTransformerBlock.forward = stored[0]
+    ldm.modules.diffusionmodules.openaimodel.ResBlock.forward = stored[1]
+    ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward = stored[2]
+
+    stored.clear()
+
```
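The change merged in here enables gradient checkpointing for ldm's BasicTransformerBlock, ResBlock, and AttentionBlock by monkey-patching their forward methods at class level: add() stashes the original methods in stored, then swaps in wrappers that route through torch.utils.checkpoint, and remove() restores the originals and clears the store. The guard at the top of add() makes patching idempotent; without it, a second call would stash the wrappers themselves and remove() could no longer recover the real methods. Below is a minimal, self-contained sketch of the same store/patch/restore pattern on a toy module; ToyBlock and ToyBlock_forward are illustrative names, not part of the webui code, and passing use_reentrant=False is an assumption for recent PyTorch (the commit calls checkpoint() without it).

```python
import torch
import torch.nn as nn
from torch.utils.checkpoint import checkpoint


class ToyBlock(nn.Module):
    # Hypothetical stand-in for ldm's blocks: forward() delegates to
    # _forward(), mirroring the structure the patch relies on.
    def __init__(self):
        super().__init__()
        self.linear = nn.Linear(8, 8)

    def forward(self, x):
        return self._forward(x)

    def _forward(self, x):
        return torch.relu(self.linear(x))


def ToyBlock_forward(self, x):
    # Checkpointed replacement: activations inside _forward are recomputed
    # during backward instead of being kept in memory, trading compute for RAM.
    return checkpoint(self._forward, x, use_reentrant=False)


stored = []


def add():
    # Patch once: stash the original method so remove() can undo the patch.
    if stored:
        return
    stored.append(ToyBlock.forward)
    ToyBlock.forward = ToyBlock_forward


def remove():
    if not stored:
        return
    ToyBlock.forward = stored[0]
    stored.clear()


if __name__ == "__main__":
    block = ToyBlock()
    x = torch.randn(4, 8, requires_grad=True)

    add()                      # enable checkpointing
    block(x).sum().backward()  # activations recomputed during backward
    remove()                   # restore the original forward
```

Patching the class attribute rather than individual instances means every existing and future block picks up (or drops) checkpointing at once, which is what lets the webui toggle it around a training run with a single add()/remove() pair.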