| author | guaneec <guaneec@users.noreply.github.com> | 2022-10-26 07:15:19 +0000 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2022-10-26 07:15:19 +0000 |
| commit | b6a8bb123bd519736306417399f6441e504f1e8b (patch) | |
| tree | 3e9a46dc12fb264ab284163bb7f636b69ef8d1d6 /modules/hypernetworks/hypernetwork.py | |
| parent | 91bb35b1e6842b30ce7553009c8ecea3643de8d2 (diff) | |
Fix merge
Diffstat (limited to 'modules/hypernetworks/hypernetwork.py')
-rw-r--r-- | modules/hypernetworks/hypernetwork.py | 4 |
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/modules/hypernetworks/hypernetwork.py b/modules/hypernetworks/hypernetwork.py
index bd171793..2997cead 100644
--- a/modules/hypernetworks/hypernetwork.py
+++ b/modules/hypernetworks/hypernetwork.py
@@ -60,7 +60,7 @@ class HypernetworkModule(torch.nn.Module):
                 linears.append(torch.nn.LayerNorm(int(dim * layer_structure[i+1])))
 
             # Add dropout except last layer
-            if use_dropout and i < len(layer_structure) - 2:
+            if use_dropout and i < len(layer_structure) - 3:
                 linears.append(torch.nn.Dropout(p=0.3))
 
         self.linear = torch.nn.Sequential(*linears)
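
The first hunk moves the dropout cut-off one step earlier. A minimal sketch of the indexing, assuming (based on the hunk context, not shown here in full) that the module is built with `for i in range(len(layer_structure) - 1)`, and using a hypothetical four-element `layer_structure`:

```python
# Sketch only: shows which loop iterations receive a Dropout layer under the
# old and new boundary checks. The loop bound is an assumption from the hunk
# context, and the layer_structure value is hypothetical.
layer_structure = [1, 2, 2, 1]  # input multiplier, two hidden multipliers, output multiplier

for i in range(len(layer_structure) - 1):
    old = i < len(layer_structure) - 2  # before the fix
    new = i < len(layer_structure) - 3  # after the fix
    print(f"iteration {i}: dropout old={old} new={new}")

# iteration 0: dropout old=True new=True
# iteration 1: dropout old=True new=False
# iteration 2: dropout old=False new=False
```

Under that assumption, the new check skips dropout on the last two iterations rather than only the last one, so no Dropout layer is inserted after the linear layer feeding the output, nor after the output layer itself.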
@@ -126,7 +126,7 @@ class Hypernetwork:
     filename = None
     name = None
 
-    def __init__(self, name=None, enable_sizes=None, layer_structure=None, activation_func=None, weight_init=None, add_layer_norm=False, use_dropout=False, activate_output=False)
+    def __init__(self, name=None, enable_sizes=None, layer_structure=None, activation_func=None, weight_init=None, add_layer_norm=False, use_dropout=False, activate_output=False):
         self.filename = None
         self.name = name
         self.layers = {}
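
The second hunk only restores the colon missing from the `__init__` signature, which would otherwise stop the module from parsing. A standalone illustration of that failure mode, using a trimmed, hypothetical signature rather than the real one:

```python
# Hypothetical, shortened signature used only to demonstrate the SyntaxError;
# the real __init__ takes the full keyword list shown in the hunk above.
broken = "def __init__(self, name=None, use_dropout=False, activate_output=False)\n    pass\n"
fixed = broken.replace(")\n", "):\n", 1)  # add the trailing colon, as the hunk does

try:
    compile(broken, "<hypernetwork>", "exec")
except SyntaxError as e:
    print("pre-fix signature fails to parse:", e.msg)

compile(fixed, "<hypernetwork>", "exec")  # parses cleanly once the colon is present
print("post-fix signature parses")
```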