author | AUTOMATIC1111 <16777216c@gmail.com> | 2023-04-29 13:50:52 +0000 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-04-29 13:50:52 +0000 |
commit | 263f0fb59c7afa9b4739b226725dd0b38956a8cf (patch) | |
tree | 2ebe6b5e7763ab2c95aa8bd72ce6d8f6afbd87ce /modules/lowvram.py | |
parent | 082613036aa7b9a8a008384b1770046d6714bc28 (diff) | |
parent | faff08f396f159a5ddd6328a6d2699b7e7d18ef9 (diff) | |
Merge branch 'dev' into remove-watermark-option
Diffstat (limited to 'modules/lowvram.py')
-rw-r--r-- | modules/lowvram.py | 10 |
1 file changed, 6 insertions, 4 deletions
diff --git a/modules/lowvram.py b/modules/lowvram.py
index 042a0254..e254cc13 100644
--- a/modules/lowvram.py
+++ b/modules/lowvram.py
@@ -55,12 +55,12 @@ def setup_for_low_vram(sd_model, use_medvram):
     if hasattr(sd_model.cond_stage_model, 'model'):
         sd_model.cond_stage_model.transformer = sd_model.cond_stage_model.model
 
-    # remove four big modules, cond, first_stage, depth (if applicable), and unet from the model and then
+    # remove several big modules: cond, first_stage, depth/embedder (if applicable), and unet from the model and then
     # send the model to GPU. Then put modules back. the modules will be in CPU.
-    stored = sd_model.cond_stage_model.transformer, sd_model.first_stage_model, getattr(sd_model, 'depth_model', None), sd_model.model
-    sd_model.cond_stage_model.transformer, sd_model.first_stage_model, sd_model.depth_model, sd_model.model = None, None, None, None
+    stored = sd_model.cond_stage_model.transformer, sd_model.first_stage_model, getattr(sd_model, 'depth_model', None), getattr(sd_model, 'embedder', None), sd_model.model
+    sd_model.cond_stage_model.transformer, sd_model.first_stage_model, sd_model.depth_model, sd_model.embedder, sd_model.model = None, None, None, None, None
     sd_model.to(devices.device)
-    sd_model.cond_stage_model.transformer, sd_model.first_stage_model, sd_model.depth_model, sd_model.model = stored
+    sd_model.cond_stage_model.transformer, sd_model.first_stage_model, sd_model.depth_model, sd_model.embedder, sd_model.model = stored
 
     # register hooks for those the first three models
     sd_model.cond_stage_model.transformer.register_forward_pre_hook(send_me_to_gpu)
@@ -69,6 +69,8 @@ def setup_for_low_vram(sd_model, use_medvram):
     sd_model.first_stage_model.decode = first_stage_model_decode_wrap
     if sd_model.depth_model:
         sd_model.depth_model.register_forward_pre_hook(send_me_to_gpu)
+    if sd_model.embedder:
+        sd_model.embedder.register_forward_pre_hook(send_me_to_gpu)
     parents[sd_model.cond_stage_model.transformer] = sd_model.cond_stage_model
 
     if hasattr(sd_model.cond_stage_model, 'model'):
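For readers unfamiliar with the trick this hunk extends, the sketch below shows the general pattern in isolation: detach the large submodules, move the rest of the model to the GPU, re-attach the submodules (their weights stay on the CPU), and register a forward pre-hook that pulls each one onto the GPU only when it is about to run. This is a minimal stand-alone illustration assuming plain PyTorch; `ToyPipeline`, its submodule names, and this simplified `send_me_to_gpu` are hypothetical stand-ins, not the webui's actual classes or its `devices` module.

```python
import torch
import torch.nn as nn

gpu = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
cpu = torch.device("cpu")

module_in_gpu = None  # the single big submodule currently resident on the GPU


def send_me_to_gpu(module, _inputs):
    """Forward pre-hook: evict the previously resident module, then move this
    one to the GPU right before its forward() executes."""
    global module_in_gpu
    if module is module_in_gpu:
        return
    if module_in_gpu is not None:
        module_in_gpu.to(cpu)
    module.to(gpu)
    module_in_gpu = module


class ToyPipeline(nn.Module):
    """Hypothetical stand-in for sd_model: several large submodules plus glue."""
    def __init__(self):
        super().__init__()
        self.cond_stage = nn.Linear(512, 512)     # stand-in for the text encoder
        self.first_stage = nn.Linear(512, 512)    # stand-in for the VAE
        self.embedder = nn.Linear(512, 512)       # stand-in for the optional embedder
        self.unet = nn.Linear(512, 512)           # stand-in for the UNet
        self.scale = nn.Parameter(torch.ones(1))  # small glue that should live on the GPU


model = ToyPipeline()

# 1. Detach the big submodules, move what remains to the GPU, then re-attach
#    them -- the re-attached submodules keep their CPU weights.
stored = model.cond_stage, model.first_stage, model.embedder, model.unet
model.cond_stage, model.first_stage, model.embedder, model.unet = None, None, None, None
model.to(gpu)
model.cond_stage, model.first_stage, model.embedder, model.unet = stored

# 2. Register the pre-hook so each submodule migrates to the GPU on demand.
for sub in (model.cond_stage, model.first_stage, model.embedder, model.unet):
    sub.register_forward_pre_hook(send_me_to_gpu)

# Calling a submodule now moves it to the GPU (and evicts the previous one)
# transparently; only one big module occupies VRAM at a time.
x = torch.randn(1, 512, device=gpu)
h = model.cond_stage(x)  # cond_stage -> GPU
h = model.unet(h)        # cond_stage -> CPU, unet -> GPU
```

The diff above applies the same idea to the real pipeline modules (the text-encoder transformer, first_stage_model, depth_model, embedder, and the UNet); the change merely adds `embedder` to the stash/restore tuple and gives it its own `send_me_to_gpu` hook when present.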