diff options
author | AUTOMATIC1111 <16777216c@gmail.com> | 2022-10-22 16:32:13 +0000 |
---|---|---|
committer | GitHub <noreply@github.com> | 2022-10-22 16:32:13 +0000 |
commit | ffea9b1509fff641f6a4f32d57941b4c75918c92 (patch) | |
tree | c4612f99c2b08deff11d82bffdf652098f2a2928 /modules/hypernetworks/ui.py | |
parent | e38625011cd4955da4bc67fe95d1d0f4c0c53899 (diff) | |
parent | 6a4fa73a38935a18779ce1809892730fd1572bee (diff) | |
download | stable-diffusion-webui-gfx803-ffea9b1509fff641f6a4f32d57941b4c75918c92.tar.gz stable-diffusion-webui-gfx803-ffea9b1509fff641f6a4f32d57941b4c75918c92.tar.bz2 stable-diffusion-webui-gfx803-ffea9b1509fff641f6a4f32d57941b4c75918c92.zip |
Merge pull request #3414 from discus0434/master
[Hypernetworks] Add a feature to use dropout / more activation functions
Diffstat (limited to 'modules/hypernetworks/ui.py')
-rw-r--r-- | modules/hypernetworks/ui.py | 10 |
1 file changed, 5 insertions, 5 deletions
diff --git a/modules/hypernetworks/ui.py b/modules/hypernetworks/ui.py index e6f50a1f..2b472d87 100644 --- a/modules/hypernetworks/ui.py +++ b/modules/hypernetworks/ui.py @@ -3,14 +3,13 @@ import os import re
import gradio as gr
-
-import modules.textual_inversion.textual_inversion
import modules.textual_inversion.preprocess
-from modules import sd_hijack, shared, devices
+import modules.textual_inversion.textual_inversion
+from modules import devices, sd_hijack, shared
from modules.hypernetworks import hypernetwork
-def create_hypernetwork(name, enable_sizes, overwrite_old, layer_structure=None, add_layer_norm=False, activation_func=None):
+def create_hypernetwork(name, enable_sizes, overwrite_old, layer_structure=None, activation_func=None, add_layer_norm=False, use_dropout=False):
# Remove illegal characters from name.
name = "".join( x for x in name if (x.isalnum() or x in "._- "))
@@ -25,8 +24,9 @@ def create_hypernetwork(name, enable_sizes, overwrite_old, layer_structure=None, name=name,
enable_sizes=[int(x) for x in enable_sizes],
layer_structure=layer_structure,
- add_layer_norm=add_layer_norm,
activation_func=activation_func,
+ add_layer_norm=add_layer_norm,
+ use_dropout=use_dropout,
)
hypernet.save(fn)
|