diff options
author | Fampai <unknown> | 2022-10-09 18:15:43 +0000 |
---|---|---|
committer | AUTOMATIC1111 <16777216c@gmail.com> | 2022-10-09 19:31:23 +0000 |
commit | 1824e9ee3ab4f94aee8908a62ea2569a01aeb3d7 (patch) | |
tree | bf3c4ed07b9d0e109bdd2c3ebd6c589706050df5 | |
parent | ad3ae441081155dcd4fde805279e5082ca264695 (diff) | |
download | stable-diffusion-webui-gfx803-1824e9ee3ab4f94aee8908a62ea2569a01aeb3d7.tar.gz stable-diffusion-webui-gfx803-1824e9ee3ab4f94aee8908a62ea2569a01aeb3d7.tar.bz2 stable-diffusion-webui-gfx803-1824e9ee3ab4f94aee8908a62ea2569a01aeb3d7.zip |
Removed unnecessary tmp variable
-rw-r--r-- | modules/sd_hijack.py | 7 |
1 file changed, 3 insertions, 4 deletions
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py index 7793d25b..437acce4 100644 --- a/modules/sd_hijack.py +++ b/modules/sd_hijack.py @@ -282,10 +282,9 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module): remade_batch_tokens_of_same_length = [x + [self.wrapped.tokenizer.eos_token_id] * (target_token_count - len(x)) for x in remade_batch_tokens]
tokens = torch.asarray(remade_batch_tokens_of_same_length).to(device)
- tmp = -opts.CLIP_stop_at_last_layers
- outputs = self.wrapped.transformer(input_ids=tokens, position_ids=position_ids, output_hidden_states=tmp)
- if tmp < -1:
- z = outputs.hidden_states[tmp]
+ outputs = self.wrapped.transformer(input_ids=tokens, position_ids=position_ids, output_hidden_states=-opts.CLIP_stop_at_last_layers)
+ if opts.CLIP_stop_at_last_layers > 1:
+ z = outputs.hidden_states[-opts.CLIP_stop_at_last_layers]
z = self.wrapped.transformer.text_model.final_layer_norm(z)
else:
z = outputs.last_hidden_state
|