author     AUTOMATIC1111 <16777216c@gmail.com>    2023-07-24 08:58:15 +0000
committer  AUTOMATIC1111 <16777216c@gmail.com>    2023-07-24 08:58:15 +0000
commit     f451994053140622ef5e394bc02ac166fb74e56f (patch)
tree       5eed77756caca9f1840207c6efa40d3e2b95c62d
parent     ec83db897887b52fbf31b430cfc4386e3ad02424 (diff)
parent     2c11e9009ea18bab4ce2963d44db0c6fd3227370 (diff)
Merge branch 'release_candidate' into dev
 modules/lowvram.py             | 7 ++++---
 modules/sd_hijack_open_clip.py | 2 +-
 2 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/modules/lowvram.py b/modules/lowvram.py
index 6bbc11eb..3f830664 100644
--- a/modules/lowvram.py
+++ b/modules/lowvram.py
@@ -90,8 +90,12 @@ def setup_for_low_vram(sd_model, use_medvram):
         sd_model.conditioner.register_forward_pre_hook(send_me_to_gpu)
     elif is_sd2:
         sd_model.cond_stage_model.model.register_forward_pre_hook(send_me_to_gpu)
+        sd_model.cond_stage_model.model.token_embedding.register_forward_pre_hook(send_me_to_gpu)
+        parents[sd_model.cond_stage_model.model] = sd_model.cond_stage_model
+        parents[sd_model.cond_stage_model.model.token_embedding] = sd_model.cond_stage_model
     else:
         sd_model.cond_stage_model.transformer.register_forward_pre_hook(send_me_to_gpu)
+        parents[sd_model.cond_stage_model.transformer] = sd_model.cond_stage_model
 
     sd_model.first_stage_model.register_forward_pre_hook(send_me_to_gpu)
     sd_model.first_stage_model.encode = first_stage_model_encode_wrap
@@ -101,9 +105,6 @@ def setup_for_low_vram(sd_model, use_medvram):
     if sd_model.embedder:
         sd_model.embedder.register_forward_pre_hook(send_me_to_gpu)
 
-    if hasattr(sd_model, 'cond_stage_model'):
-        parents[sd_model.cond_stage_model.transformer] = sd_model.cond_stage_model
-
     if use_medvram:
         sd_model.model.register_forward_pre_hook(send_me_to_gpu)
     else:
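For orientation (not part of the commit): the low-VRAM path keeps most of the model on the CPU and registers forward pre-hooks that pull a module onto the GPU only when it is about to run; the `parents` dict lets a hook that fires on a small submodule (here `token_embedding`) move its whole parent text encoder instead. A minimal sketch of that pattern, assuming PyTorch and made-up module names rather than the actual modules/lowvram.py code:

```python
import torch
import torch.nn as nn

cpu = torch.device("cpu")
gpu = torch.device("cuda" if torch.cuda.is_available() else "cpu")  # assumed target device

module_in_gpu = None   # the single large module currently resident on the GPU
parents = {}           # hooked submodule -> larger module that should be moved instead

def send_me_to_gpu(module, _inputs):
    # Forward pre-hook: move this module (or its registered parent) to the GPU,
    # evicting whichever module occupied it before.
    global module_in_gpu
    module = parents.get(module, module)
    if module_in_gpu is module:
        return
    if module_in_gpu is not None:
        module_in_gpu.to(cpu)
    module.to(gpu)
    module_in_gpu = module

# Hypothetical text encoder standing in for cond_stage_model: hook the embedding
# layer and map it back to the whole encoder, mirroring the SD2 branch above.
text_encoder = nn.Sequential(nn.Embedding(49408, 1024), nn.Linear(1024, 1024)).to(cpu)
token_embedding = text_encoder[0]
token_embedding.register_forward_pre_hook(send_me_to_gpu)
parents[token_embedding] = text_encoder
```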
diff --git a/modules/sd_hijack_open_clip.py b/modules/sd_hijack_open_clip.py
index bb0b96c7..25c5e983 100644
--- a/modules/sd_hijack_open_clip.py
+++ b/modules/sd_hijack_open_clip.py
@@ -32,7 +32,7 @@ class FrozenOpenCLIPEmbedderWithCustomWords(sd_hijack_clip.FrozenCLIPEmbedderWit
     def encode_embedding_init_text(self, init_text, nvpt):
         ids = tokenizer.encode(init_text)
         ids = torch.asarray([ids], device=devices.device, dtype=torch.int)
-        embedded = self.wrapped.model.token_embedding.wrapped(ids.to(self.wrapped.model.token_embedding.wrapped.weight.device)).squeeze(0)
+        embedded = self.wrapped.model.token_embedding.wrapped(ids).squeeze(0)
         return embedded
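The sd_hijack_open_clip.py change relies on the hooks added above: once token_embedding carries its own pre-hook (with its parent recorded in `parents`), its weights are on the right device by the time the lookup runs, so encode_embedding_init_text no longer needs to move `ids` to the weight's device. Continuing the hypothetical sketch above (not the repository's actual objects):

```python
# ids live on the compute device, as in the original code (torch.long used here for portability).
ids = torch.tensor([[320, 1125, 539]], device=gpu, dtype=torch.long)

# Before the change, the caller had to chase the weight's device explicitly:
#   embedded = token_embedding(ids.to(token_embedding.weight.device)).squeeze(0)
# After it, the pre-hook moves the encoder to the GPU before the lookup, so this is enough:
embedded = token_embedding(ids).squeeze(0)   # shape: (3, 1024)
```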