-rw-r--r--  CHANGELOG.md                  |  5
-rw-r--r--  launch.py                     | 48
-rw-r--r--  scripts/prompts_from_file.py  |  2
3 files changed, 18 insertions, 37 deletions
diff --git a/CHANGELOG.md b/CHANGELOG.md
index cf3fef3d..d1727864 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,7 +1,7 @@
-## Upcoming 1.2.0
+## 1.2.0
### Features:
- * do not load wait for stable diffusion model to load at startup
+ * do not wait for stable diffusion model to load at startup
* add filename patterns: [denoising]
* directory hiding for extra networks: dirs starting with . will hide their cards on extra network tabs unless specifically searched for
* Lora: for the `<...>` text in prompt, use name of Lora that is in the metadata of the file, if present, instead of filename (both can be used to activate lora)
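
The directory-hiding entry above states the rule only in words. A rough Python sketch of that rule as worded, assuming a plain "path component starts with a dot" check and a substring search match; this is an illustration only, not the webui's extra-networks code:

import os

def card_is_hidden(path: str, search: str = "") -> bool:
    # Assumption: any path component starting with "." hides the card...
    parts = os.path.normpath(path).split(os.sep)
    hidden = any(p.startswith(".") and p not in (".", "..") for p in parts)
    # ...unless the search text explicitly mentions it.
    return hidden and (not search or search not in path)

print(card_is_hidden("lora/.archive/old-style.safetensors"))              # True: hidden by default
print(card_is_hidden("lora/.archive/old-style.safetensors", ".archive"))  # False: shown when searched for
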
@@ -40,6 +40,7 @@
* Fix MPS on PyTorch 2.0.1, Intel Macs
* make it so that custom context menu from contextMenu.js only disappears after user's click, ignoring non-user click events
* prevent Reload UI button/link from reloading the page when it's not yet ready
+ * fix prompts from file script failing to read contents from a drag/drop file
## 1.1.1
diff --git a/launch.py b/launch.py
--- a/launch.py
+++ b/launch.py
@@ -5,6 +5,7 @@
import sys
import importlib.util
import platform
import json
+from functools import lru_cache
from modules import cmd_args
from modules.paths_internal import script_path, extensions_dir
@@ -14,8 +15,6 @@
args, _ = cmd_args.parser.parse_known_args()
python = sys.executable
git = os.environ.get('GIT', "git")
index_url = os.environ.get('INDEX_URL', "")
-stored_commit_hash = None
-stored_git_tag = None
dir_repos = "repositories"
# Whether to default to printing command output
@@ -56,32 +55,20 @@
Use --skip-python-version-check to suppress this warning.
""")
+@lru_cache()
def commit_hash():
- global stored_commit_hash
-
- if stored_commit_hash is not None:
- return stored_commit_hash
-
try:
- stored_commit_hash = run(f"{git} rev-parse HEAD").strip()
+ return subprocess.check_output(f"{git} rev-parse HEAD", encoding='utf8').strip()
except Exception:
- stored_commit_hash = "<none>"
-
- return stored_commit_hash
+ return "<none>"
+@lru_cache()
def git_tag():
- global stored_git_tag
-
- if stored_git_tag is not None:
- return stored_git_tag
-
try:
- stored_git_tag = run(f"{git} describe --tags").strip()
+ return subprocess.check_output(f"{git} describe --tags", encoding='utf8').strip()
except Exception:
- stored_git_tag = "<none>"
-
- return stored_git_tag
+ return "<none>"
def run(command, desc=None, errdesc=None, custom_env=None, live: bool = default_command_live) -> str:
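
The hunk above swaps the module-level stored_commit_hash / stored_git_tag globals for functools.lru_cache, which memoizes the first result, including the "<none>" fallback. A minimal, self-contained sketch of that caching pattern, with a literal "git" command standing in for the webui's configurable git variable:

import subprocess
from functools import lru_cache

@lru_cache()
def commit_hash() -> str:
    try:
        # list-form argv, so no shell quoting is involved
        return subprocess.check_output(["git", "rev-parse", "HEAD"], encoding="utf8").strip()
    except Exception:
        return "<none>"

print(commit_hash())  # spawns git once
print(commit_hash())  # answered from the cache, no second subprocess
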
@@ -116,11 +103,6 @@ def run(command, desc=None, errdesc=None, custom_env=None, live: bool = default_
return (result.stdout or "")
-def check_run(command):
- result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
- return result.returncode == 0
-
-
def is_installed(package):
try:
spec = importlib.util.find_spec(package)
@@ -134,10 +116,6 @@ def repo_dir(name):
return os.path.join(script_path, dir_repos, name)
-def run_python(code, desc=None, errdesc=None):
- return run(f'"{python}" -c "{code}"', desc, errdesc)
-
-
def run_pip(command, desc=None, live=default_command_live):
if args.skip_install:
return
@@ -146,8 +124,9 @@ def run_pip(command, desc=None, live=default_command_live):
return run(f'"{python}" -m pip {command} --prefer-binary{index_url_line}', desc=f"Installing {desc}", errdesc=f"Couldn't install {desc}", live=live)
-def check_run_python(code):
- return check_run(f'"{python}" -c "{code}"')
+def check_run_python(code: str) -> bool:
+ result = subprocess.run([python, "-c", code], capture_output=True, shell=False)
+ return result.returncode == 0
def git_clone(url, dir, name, commithash=None):
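
The rewritten check_run_python above runs the given code in a fresh interpreter and reduces the outcome to an exit-status boolean. A small stand-alone sketch of the same pattern, using sys.executable in place of the webui's python variable:

import subprocess
import sys

def check_run_python(code: str) -> bool:
    # argv list, shell=False: the code string is passed to -c verbatim
    result = subprocess.run([sys.executable, "-c", code], capture_output=True, shell=False)
    return result.returncode == 0

print(check_run_python("print('ok')"))          # True
print(check_run_python("raise SystemExit(1)"))  # False
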
@@ -274,8 +253,11 @@ def prepare_environment():
if args.reinstall_torch or not is_installed("torch") or not is_installed("torchvision"):
run(f'"{python}" -m {torch_command}', "Installing torch and torchvision", "Couldn't install torch", live=True)
- if not args.skip_torch_cuda_test:
- run_python("import torch; assert torch.cuda.is_available(), 'Torch is not able to use GPU; add --skip-torch-cuda-test to COMMANDLINE_ARGS variable to disable this check'")
+ if not args.skip_torch_cuda_test and not check_run_python("import torch; assert torch.cuda.is_available()"):
+ raise RuntimeError(
+ 'Torch is not able to use GPU; '
+ 'add --skip-torch-cuda-test to COMMANDLINE_ARGS variable to disable this check'
+ )
if not is_installed("gfpgan"):
run_pip(f"install {gfpgan_package}", "gfpgan")
diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 2378816f..b918a764 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -103,8 +103,6 @@ def load_prompt_file(file):
return None, "\n".join(lines), gr.update(lines=7)
-
-
class Script(scripts.Script):
def title(self):
return "Prompts from file or textbox"