author | flamelaw <flamelaw.com3d2@gmail.com> | 2022-11-20 03:38:18 +0000 |
---|---|---|
committer | flamelaw <flamelaw.com3d2@gmail.com> | 2022-11-20 03:38:18 +0000 |
commit | a4a5735d0a80218e59f8a6e8401726f7209a6a8d (patch) | |
tree | 7dd0b6f78393275569831e9fc086df286489721c /modules/textual_inversion/dataset.py | |
parent | bd68e35de3b7cf7547ed97d8bdf60147402133cc (diff) | |
remove unnecessary comment
Diffstat (limited to 'modules/textual_inversion/dataset.py')
-rw-r--r-- | modules/textual_inversion/dataset.py | 9 |
1 file changed, 0 insertions, 9 deletions
diff --git a/modules/textual_inversion/dataset.py b/modules/textual_inversion/dataset.py
index d594b49d..1dd53b85 100644
--- a/modules/textual_inversion/dataset.py
+++ b/modules/textual_inversion/dataset.py
@@ -103,15 +103,6 @@ class PersonalizedBase(Dataset):
if include_cond and not (self.tag_drop_out != 0 or self.shuffle_tags):
with torch.autocast("cuda"):
entry.cond = cond_model([entry.cond_text]).to(devices.cpu).squeeze(0)
- # elif not include_cond:
- # _, _, _, _, hijack_fixes, token_count = cond_model.process_text([entry.cond_text])
- # max_n = token_count // 75
- # index_list = [ [] for _ in range(max_n + 1) ]
- # for n, (z, _) in hijack_fixes[0]:
- # index_list[n].append(z)
- # with torch.autocast("cuda"):
- # entry.cond = cond_model([entry.cond_text]).to(devices.cpu).squeeze(0)
- # entry.emb_index = index_list
self.dataset.append(entry)
del torchdata
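
For context on what remains after this change: the surviving branch only caches `entry.cond` when `include_cond` is true and neither tag dropout nor tag shuffling is enabled, presumably because a precomputed conditioning tensor would not match a prompt that changes between epochs. The sketch below illustrates that pattern in isolation; `DatasetEntry`, `DummyCondModel`, and `maybe_cache_cond` are simplified stand-ins invented for this example, not the webui classes (the real code calls the webui's `cond_model` under `torch.autocast("cuda")`).

```python
# Minimal sketch, not the webui implementation: cache the conditioning tensor
# only when the prompt text is static (no tag dropout, no tag shuffling).
import torch


class DatasetEntry:
    def __init__(self, cond_text):
        self.cond_text = cond_text
        self.cond = None  # filled in only when caching is safe


class DummyCondModel(torch.nn.Module):
    """Stand-in for the real text encoder; maps prompts to a fixed-size tensor."""
    def forward(self, texts):
        return torch.zeros(len(texts), 77, 768)


def maybe_cache_cond(entry, cond_model, include_cond, tag_drop_out, shuffle_tags):
    # Only precompute the conditioning when the cond text will be identical
    # on every access; otherwise it must be recomputed at training time.
    if include_cond and not (tag_drop_out != 0 or shuffle_tags):
        with torch.no_grad():
            entry.cond = cond_model([entry.cond_text]).cpu().squeeze(0)
    return entry


entry = maybe_cache_cond(DatasetEntry("a photo of a cat"), DummyCondModel(),
                         include_cond=True, tag_drop_out=0, shuffle_tags=False)
print(entry.cond.shape)  # torch.Size([77, 768])
```

The sketch uses `torch.no_grad()` rather than the autocast context for simplicity; the guarding condition itself mirrors the one visible in the diff above.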