diff options
author | guaneec <guaneec@users.noreply.github.com> | 2022-10-26 05:43:04 +0000 |
---|---|---|
committer | GitHub <noreply@github.com> | 2022-10-26 05:43:04 +0000 |
commit | c702d4d0df21790199d199818f25c449213ffe0f (patch) | |
tree | 62af849d91e2fc7b09d1d62fb7cacd1a613906aa /modules/hypernetworks/hypernetwork.py | |
parent | 2f4c91894d4c0a055c1069b2fda0e4da8fcda188 (diff) | |
download | stable-diffusion-webui-gfx803-c702d4d0df21790199d199818f25c449213ffe0f.tar.gz stable-diffusion-webui-gfx803-c702d4d0df21790199d199818f25c449213ffe0f.tar.bz2 stable-diffusion-webui-gfx803-c702d4d0df21790199d199818f25c449213ffe0f.zip |
Fix off-by-one
Diffstat (limited to 'modules/hypernetworks/hypernetwork.py')
-rw-r--r-- | modules/hypernetworks/hypernetwork.py | 4 |
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/modules/hypernetworks/hypernetwork.py b/modules/hypernetworks/hypernetwork.py index 54346b64..3ce85bb5 100644 --- a/modules/hypernetworks/hypernetwork.py +++ b/modules/hypernetworks/hypernetwork.py @@ -42,7 +42,7 @@ class HypernetworkModule(torch.nn.Module): linears.append(torch.nn.Linear(int(dim * layer_structure[i]), int(dim * layer_structure[i+1])))
# Add an activation func except last layer
- if activation_func == "linear" or activation_func is None or i >= len(layer_structure) - 3:
+ if activation_func == "linear" or activation_func is None or i >= len(layer_structure) - 2:
pass
elif activation_func in self.activation_dict:
linears.append(self.activation_dict[activation_func]())
@@ -54,7 +54,7 @@ class HypernetworkModule(torch.nn.Module): linears.append(torch.nn.LayerNorm(int(dim * layer_structure[i+1])))
# Add dropout except last layer
- if use_dropout and i < len(layer_structure) - 3:
+ if use_dropout and i < len(layer_structure) - 2:
linears.append(torch.nn.Dropout(p=0.3))
self.linear = torch.nn.Sequential(*linears)
|