author     AUTOMATIC <16777216c@gmail.com>  2022-11-05 14:09:42 +0000
committer  AUTOMATIC <16777216c@gmail.com>  2022-11-05 14:09:42 +0000
commit     62e3d71aa778928d63cab81d9d8cde33e55bebb3 (patch)
tree       12d20a2da7e4d47befeceb9a36ca81436dfabea4
parent     b8f2dfed3c0085f1df359b9dc5b3841ddc2196f0 (diff)
rework the code to not use the walrus operator because colab's 3.7 does not support it
 modules/hypernetworks/hypernetwork.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/modules/hypernetworks/hypernetwork.py b/modules/hypernetworks/hypernetwork.py
index 5ceed6ee..7f182712 100644
--- a/modules/hypernetworks/hypernetwork.py
+++ b/modules/hypernetworks/hypernetwork.py
@@ -429,13 +429,16 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
     weights = hypernetwork.weights()
     for weight in weights:
         weight.requires_grad = True
+
     # Here we use optimizer from saved HN, or we can specify as UI option.
-    if (optimizer_name := hypernetwork.optimizer_name) in optimizer_dict:
+    if hypernetwork.optimizer_name in optimizer_dict:
         optimizer = optimizer_dict[hypernetwork.optimizer_name](params=weights, lr=scheduler.learn_rate)
+        optimizer_name = hypernetwork.optimizer_name
     else:
-        print(f"Optimizer type {optimizer_name} is not defined!")
+        print(f"Optimizer type {hypernetwork.optimizer_name} is not defined!")
         optimizer = torch.optim.AdamW(params=weights, lr=scheduler.learn_rate)
         optimizer_name = 'AdamW'
+
     if hypernetwork.optimizer_state_dict:  # This line must be changed if Optimizer type can be different from saved optimizer.
         try:
             optimizer.load_state_dict(hypernetwork.optimizer_state_dict)
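
For context: the walrus operator (:=) was introduced in Python 3.8 by PEP 572, so the deleted line "if (optimizer_name := hypernetwork.optimizer_name) in optimizer_dict:" is a SyntaxError on Colab's Python 3.7. Below is a minimal standalone sketch of the same rewrite pattern; pick_name_38, pick_name_37, and the two-entry optimizer_dict are illustrative stand-ins, not code from the repository, and the sketch itself needs 3.8+ to parse the walrus version:

    optimizer_dict = {'AdamW': object(), 'SGD': object()}  # illustrative stand-in table

    def pick_name_38(requested):
        # Python 3.8+ only: bind the name and test membership in one expression.
        if (optimizer_name := requested) in optimizer_dict:
            return optimizer_name
        return 'AdamW'

    def pick_name_37(requested):
        # Python 3.7-compatible rewrite, mirroring this commit: test the value
        # directly, then assign the name separately in each branch.
        if requested in optimizer_dict:
            optimizer_name = requested
        else:
            optimizer_name = 'AdamW'
        return optimizer_name

    assert pick_name_38('SGD') == pick_name_37('SGD') == 'SGD'
    assert pick_name_38('missing') == pick_name_37('missing') == 'AdamW'

Both variants return the same result; the commit trades one line of brevity for 3.7 compatibility.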