author	unknown <mcgpapu@gmail.com>	2023-01-28 09:40:51 +0000
committer	unknown <mcgpapu@gmail.com>	2023-01-28 09:40:51 +0000
commit	e79b7db4b47a33889551b9266ee3277879d4f560 (patch)
tree	1c1944204e58e254bfea22ae44edccdbb54e6b3c /modules/sd_hijack_checkpoint.py
parent	b921a52071cf2a5e551c31a6073af6eaebbf7847 (diff)
parent	e8a41df49fadd2cf9f23b1f02d75a4947bec5646 (diff)
Merge branch 'master' of github.com:AUTOMATIC1111/stable-diffusion-webui into gamepad
Diffstat (limited to 'modules/sd_hijack_checkpoint.py')
-rw-r--r--	modules/sd_hijack_checkpoint.py	38
1 file changed, 37 insertions(+), 1 deletion(-)
diff --git a/modules/sd_hijack_checkpoint.py b/modules/sd_hijack_checkpoint.py
index 5712972f..2604d969 100644
--- a/modules/sd_hijack_checkpoint.py
+++ b/modules/sd_hijack_checkpoint.py
@@ -1,10 +1,46 @@
 from torch.utils.checkpoint import checkpoint
 
+import ldm.modules.attention
+import ldm.modules.diffusionmodules.openaimodel
+
+
 def BasicTransformerBlock_forward(self, x, context=None):
     return checkpoint(self._forward, x, context)
 
+
 def AttentionBlock_forward(self, x):
     return checkpoint(self._forward, x)
 
+
 def ResBlock_forward(self, x, emb):
-    return checkpoint(self._forward, x, emb)
\ No newline at end of file
+    return checkpoint(self._forward, x, emb)
+
+
+stored = []
+
+
+def add():
+    if len(stored) != 0:
+        return
+
+    stored.extend([
+        ldm.modules.attention.BasicTransformerBlock.forward,
+        ldm.modules.diffusionmodules.openaimodel.ResBlock.forward,
+        ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward
+    ])
+
+    ldm.modules.attention.BasicTransformerBlock.forward = BasicTransformerBlock_forward
+    ldm.modules.diffusionmodules.openaimodel.ResBlock.forward = ResBlock_forward
+    ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward = AttentionBlock_forward
+
+
+def remove():
+    if len(stored) == 0:
+        return
+
+    ldm.modules.attention.BasicTransformerBlock.forward = stored[0]
+    ldm.modules.diffusionmodules.openaimodel.ResBlock.forward = stored[1]
+    ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward = stored[2]
+
+    stored.clear()
+
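
The added add()/remove() helpers monkey-patch the ldm attention and ResBlock forward() methods to route through torch.utils.checkpoint, so activations are recomputed during the backward pass instead of stored, trading compute for memory while training. A minimal usage sketch of how these helpers might be toggled around a training step (hypothetical caller, not part of this commit; assumes the module is importable as modules.sd_hijack_checkpoint from the webui root):

    from modules import sd_hijack_checkpoint

    # Hypothetical caller: enable gradient checkpointing in the ldm blocks,
    # run the training work, and always restore the original methods after.
    sd_hijack_checkpoint.add()       # swap in the checkpointed forward() implementations
    try:
        ...                          # run training forward/backward passes here
    finally:
        sd_hijack_checkpoint.remove()  # restore the stored original forward() methods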