author     AUTOMATIC <16777216c@gmail.com>    2022-12-10 06:17:39 +0000
committer  AUTOMATIC <16777216c@gmail.com>    2022-12-10 06:17:39 +0000
commit     505ec7e4d960e7bea579182509050fafb10bd00c (patch)
tree       8009bae770532d0fa5f6f857414b44fb4830e259 /modules
parent     7dbfd8a7d8aefec7283b456c6f5b000ae4d3496d (diff)
cleanup some unneeded imports for hijack files
Diffstat (limited to 'modules')
-rw-r--r--  modules/sd_hijack.py               | 10
-rw-r--r--  modules/sd_hijack_optimizations.py |  3
2 files changed, 2 insertions, 11 deletions
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 47dbc1b7..690a9ec2 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -1,16 +1,10 @@
-import math
-import os
-import sys
-import traceback
 import torch
-import numpy as np
-from torch import einsum
 from torch.nn.functional import silu
 
 import modules.textual_inversion.textual_inversion
-from modules import prompt_parser, devices, sd_hijack_optimizations, shared, sd_hijack_checkpoint
+from modules import devices, sd_hijack_optimizations, shared, sd_hijack_checkpoint
 from modules.hypernetworks import hypernetwork
-from modules.shared import opts, device, cmd_opts
+from modules.shared import cmd_opts
 from modules import sd_hijack_clip, sd_hijack_open_clip, sd_hijack_unet
 
 from modules.sd_hijack_optimizations import invokeAI_mps_available
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 85909eb9..98123fbf 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -5,7 +5,6 @@ import importlib
 
 import torch
 from torch import einsum
-import torch.nn.functional as F
 
 from ldm.util import default
 from einops import rearrange
@@ -13,8 +12,6 @@ from einops import rearrange
 from modules import shared
 from modules.hypernetworks import hypernetwork
 
-from ldm.modules.diffusionmodules.util import timestep_embedding
-
 
 if shared.cmd_opts.xformers or shared.cmd_opts.force_enable_xformers:
     try:
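Removals like these are typically found with a linter such as pyflakes rather than by hand. As a rough illustration only (not part of this repository or this commit), a minimal ast-based check for imports that are never referenced might look like the sketch below; the unused_imports helper, its file name, and its heuristics are assumptions made for this example.

    # find_unused_imports.py (hypothetical helper, not part of stable-diffusion-webui)
    # Lists names a module imports but never references, the kind of candidates
    # removed in this commit. String-level uses (e.g. in __all__) are not detected.
    import ast
    import sys


    def unused_imports(path):
        with open(path, encoding="utf-8") as f:
            tree = ast.parse(f.read())

        imported = {}  # bound name -> line where it is imported
        for node in ast.walk(tree):
            if isinstance(node, ast.Import):
                for alias in node.names:
                    # "import a.b.c" binds the top-level name "a" unless aliased
                    imported[(alias.asname or alias.name).split(".")[0]] = node.lineno
            elif isinstance(node, ast.ImportFrom):
                for alias in node.names:
                    imported[alias.asname or alias.name] = node.lineno

        # Every plain name that is loaded or stored anywhere in the module
        used = {n.id for n in ast.walk(tree) if isinstance(n, ast.Name)}
        return [(name, line) for name, line in imported.items() if name not in used]


    if __name__ == "__main__":
        for name, line in unused_imports(sys.argv[1]):
            print(f"line {line}: '{name}' imported but unused")

Run against a file such as modules/sd_hijack.py, a script like this would flag names such as math, os, sys, and traceback once nothing in the module references them; it is a sanity check, not a substitute for the project's usual lint tooling.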