author    AUTOMATIC1111 <16777216c@gmail.com> 2023-07-24 08:57:59 +0000
committer AUTOMATIC1111 <16777216c@gmail.com> 2023-07-24 08:57:59 +0000
commit    2c11e9009ea18bab4ce2963d44db0c6fd3227370 (patch)
tree      cf70006b4d1d6df1f42ea944416b1034ae32a92b /modules/lowvram.py
parent    1f26815dd3ea5a0b8c094854e47219aec866f28c (diff)
repair --medvram for SD2.x too after SDXL update
Diffstat (limited to 'modules/lowvram.py')
-rw-r--r--  modules/lowvram.py | 7
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/modules/lowvram.py b/modules/lowvram.py
index 6bbc11eb..3f830664 100644
--- a/modules/lowvram.py
+++ b/modules/lowvram.py
@@ -90,8 +90,12 @@ def setup_for_low_vram(sd_model, use_medvram):
         sd_model.conditioner.register_forward_pre_hook(send_me_to_gpu)
     elif is_sd2:
         sd_model.cond_stage_model.model.register_forward_pre_hook(send_me_to_gpu)
+        sd_model.cond_stage_model.model.token_embedding.register_forward_pre_hook(send_me_to_gpu)
+        parents[sd_model.cond_stage_model.model] = sd_model.cond_stage_model
+        parents[sd_model.cond_stage_model.model.token_embedding] = sd_model.cond_stage_model
     else:
         sd_model.cond_stage_model.transformer.register_forward_pre_hook(send_me_to_gpu)
+        parents[sd_model.cond_stage_model.transformer] = sd_model.cond_stage_model
 
     sd_model.first_stage_model.register_forward_pre_hook(send_me_to_gpu)
     sd_model.first_stage_model.encode = first_stage_model_encode_wrap
@@ -101,9 +105,6 @@ def setup_for_low_vram(sd_model, use_medvram):
     if sd_model.embedder:
         sd_model.embedder.register_forward_pre_hook(send_me_to_gpu)
 
-    if hasattr(sd_model, 'cond_stage_model'):
-        parents[sd_model.cond_stage_model.transformer] = sd_model.cond_stage_model
-
     if use_medvram:
         sd_model.model.register_forward_pre_hook(send_me_to_gpu)
     else:
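
For context, the patch relies on PyTorch forward pre-hooks: large submodules stay in CPU RAM and are moved to the GPU only right before their own forward() runs, and the parents map redirects a hook registered on a child (here SD2's token_embedding) so the whole enclosing text encoder is moved as one unit. The sketch below illustrates that pattern under stated assumptions: send_me_to_gpu and parents mirror the names in the diff, while TinyTextEncoder, the module_in_gpu bookkeeping, and the device setup are hypothetical stand-ins, not the actual modules/lowvram.py code.

    import torch
    import torch.nn as nn

    # Illustrative sketch of the low-VRAM hook pattern, not the repo's code.
    cpu = torch.device("cpu")
    gpu = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    parents = {}          # child module -> parent module to move as a unit
    module_in_gpu = None  # the block currently resident on the GPU

    def send_me_to_gpu(module, _inputs):
        """Forward pre-hook: evict the previously active block, then move this
        module's registered parent (or the module itself) to the GPU."""
        global module_in_gpu
        module = parents.get(module, module)
        if module_in_gpu is module:
            return
        if module_in_gpu is not None:
            module_in_gpu.to(cpu)  # free VRAM held by the previous block
        module.to(gpu)
        module_in_gpu = module

    # Hypothetical text encoder whose token_embedding may be called directly
    # (as with SD2.x), so it gets its own hook plus a parents entry.
    class TinyTextEncoder(nn.Module):
        def __init__(self):
            super().__init__()
            self.token_embedding = nn.Embedding(1000, 64)
            self.transformer = nn.Linear(64, 64)

        def forward(self, tokens):
            return self.transformer(self.token_embedding(tokens))

    encoder = TinyTextEncoder().to(cpu)
    encoder.register_forward_pre_hook(send_me_to_gpu)
    encoder.token_embedding.register_forward_pre_hook(send_me_to_gpu)
    parents[encoder.token_embedding] = encoder  # move the whole encoder, not just the embedding

    # Either entry point now pulls the full encoder onto the GPU before running.
    out = encoder(torch.randint(0, 1000, (1, 8), device=gpu))

The point of the parents entry is that a hook fired on the child alone would otherwise move only the embedding table; mapping the child back to its parent guarantees the whole encoder crosses to the GPU even when the child is the first thing called.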