author | AUTOMATIC1111 <16777216c@gmail.com> | 2022-10-21 06:57:55 +0000
committer | GitHub <noreply@github.com> | 2022-10-21 06:57:55 +0000
commit | 0c5522ea211370046a2ca628e92d35a7c8c97972 (patch)
tree | 7c66a3eba0a8a8149900056c6511146182b02276 /modules/interrogate.py
parent | 858462f719c22ca9f24b94a41699653c34b5f4fb (diff)
parent | 2273e752fb3e578f1047f6d38b96330b07bf61a9 (diff)
Merge branch 'master' into training-help-text
Diffstat (limited to 'modules/interrogate.py')
-rw-r--r-- | modules/interrogate.py | 12
1 file changed, 9 insertions, 3 deletions
diff --git a/modules/interrogate.py b/modules/interrogate.py
index 64b91eb4..65b05d34 100644
--- a/modules/interrogate.py
+++ b/modules/interrogate.py
@@ -28,9 +28,11 @@ class InterrogateModels:
     clip_preprocess = None
     categories = None
     dtype = None
+    running_on_cpu = None
 
     def __init__(self, content_dir):
         self.categories = []
+        self.running_on_cpu = devices.device_interrogate == torch.device("cpu")
 
         if os.path.exists(content_dir):
             for filename in os.listdir(content_dir):
@@ -53,7 +55,11 @@ class InterrogateModels:
     def load_clip_model(self):
         import clip
 
-        model, preprocess = clip.load(clip_model_name)
+        if self.running_on_cpu:
+            model, preprocess = clip.load(clip_model_name, device="cpu")
+        else:
+            model, preprocess = clip.load(clip_model_name)
+
         model.eval()
         model = model.to(devices.device_interrogate)
 
@@ -62,14 +68,14 @@ class InterrogateModels:
     def load(self):
         if self.blip_model is None:
            self.blip_model = self.load_blip_model()
-            if not shared.cmd_opts.no_half:
+            if not shared.cmd_opts.no_half and not self.running_on_cpu:
                 self.blip_model = self.blip_model.half()
 
         self.blip_model = self.blip_model.to(devices.device_interrogate)
 
         if self.clip_model is None:
             self.clip_model, self.clip_preprocess = self.load_clip_model()
-            if not shared.cmd_opts.no_half:
+            if not shared.cmd_opts.no_half and not self.running_on_cpu:
                 self.clip_model = self.clip_model.half()
 
         self.clip_model = self.clip_model.to(devices.device_interrogate)
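Taken together, the change detects when interrogation is pinned to the CPU, loads CLIP with an explicit device="cpu", and skips the half-precision cast in that case, since float16 is generally slow or unsupported for CPU ops. The following is a minimal standalone sketch of that pattern, not the module's actual wiring: `device_interrogate`, `no_half`, and the "ViT-L/14" model name are illustrative stand-ins for the webui's `devices.device_interrogate`, `shared.cmd_opts.no_half`, and `clip_model_name`.

```python
# Minimal sketch of the CPU-aware loading pattern from the diff above.
# Assumes the OpenAI "clip" package and PyTorch are installed; the names
# device_interrogate, no_half, and "ViT-L/14" are illustrative stand-ins,
# not the webui's actual configuration objects.
import torch
import clip

device_interrogate = torch.device("cuda" if torch.cuda.is_available() else "cpu")
no_half = False  # stand-in for shared.cmd_opts.no_half
running_on_cpu = device_interrogate == torch.device("cpu")

# clip.load() defaults to CUDA when it is available, so pass device="cpu"
# explicitly when interrogation has been pinned to the CPU.
if running_on_cpu:
    model, preprocess = clip.load("ViT-L/14", device="cpu")
else:
    model, preprocess = clip.load("ViT-L/14")
model.eval()

# Only cast to half precision on GPU; fp16 is slow or unsupported for many
# CPU ops, which is what the new running_on_cpu guard avoids.
if not no_half and not running_on_cpu:
    model = model.half()
model = model.to(device_interrogate)
```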