author    | AUTOMATIC1111 <16777216c@gmail.com> | 2023-07-08 13:50:23 +0000
committer | GitHub <noreply@github.com>         | 2023-07-08 13:50:23 +0000
commit    | 993dd9a8927407de8d19142cacb07e6f76686a67 (patch)
tree      | 89df31c33ecf054c9cb70aaf38fc3382f306f450 /modules/mac_specific.py
parent    | ff6acd35d0807a4e0c3ee86cdb1520a4a3a11cdd (diff)
parent    | d7d6e8cfc8b85a99a48f82975ee213d487783c28 (diff)
download  | stable-diffusion-webui-gfx803-993dd9a8927407de8d19142cacb07e6f76686a67.tar.gz
          | stable-diffusion-webui-gfx803-993dd9a8927407de8d19142cacb07e6f76686a67.tar.bz2
          | stable-diffusion-webui-gfx803-993dd9a8927407de8d19142cacb07e6f76686a67.zip
Merge branch 'dev' into patch-1
Diffstat (limited to 'modules/mac_specific.py')
-rw-r--r-- | modules/mac_specific.py | 28
1 file changed, 16 insertions(+), 12 deletions(-)
diff --git a/modules/mac_specific.py b/modules/mac_specific.py
index 40ce2101..735847f5 100644
--- a/modules/mac_specific.py
+++ b/modules/mac_specific.py
@@ -1,20 +1,24 @@
 import torch
 import platform
-from modules import paths
 from modules.sd_hijack_utils import CondFunc
 from packaging import version
 
 
-# has_mps is only available in nightly pytorch (for now) and macOS 12.3+.
-# check `getattr` and try it for compatibility
+# before torch version 1.13, has_mps is only available in nightly pytorch and macOS 12.3+,
+# so check `getattr` and try it for compatibility.
+# in torch version 1.13, backends.mps.is_available() and backends.mps.is_built() were introduced to check mps availability;
+# since the torch 2.0.1+ nightly builds, getattr(torch, 'has_mps', False) is deprecated, see https://github.com/pytorch/pytorch/pull/103279
 def check_for_mps() -> bool:
-    if not getattr(torch, 'has_mps', False):
-        return False
-    try:
-        torch.zeros(1).to(torch.device("mps"))
-        return True
-    except Exception:
-        return False
+    if version.parse(torch.__version__) <= version.parse("2.0.1"):
+        if not getattr(torch, 'has_mps', False):
+            return False
+        try:
+            torch.zeros(1).to(torch.device("mps"))
+            return True
+        except Exception:
+            return False
+    else:
+        return torch.backends.mps.is_available() and torch.backends.mps.is_built()
 
 
 has_mps = check_for_mps()
@@ -43,7 +47,7 @@ if has_mps:
     # MPS workaround for https://github.com/pytorch/pytorch/issues/79383
     CondFunc('torch.Tensor.to', lambda orig_func, self, *args, **kwargs: orig_func(self.contiguous(), *args, **kwargs),
         lambda _, self, *args, **kwargs: self.device.type != 'mps' and (args and isinstance(args[0], torch.device) and args[0].type == 'mps' or isinstance(kwargs.get('device'), torch.device) and kwargs['device'].type == 'mps'))
-    # MPS workaround for https://github.com/pytorch/pytorch/issues/80800 
+    # MPS workaround for https://github.com/pytorch/pytorch/issues/80800
     CondFunc('torch.nn.functional.layer_norm', lambda orig_func, *args, **kwargs: orig_func(*([args[0].contiguous()] + list(args[1:])), **kwargs),
         lambda _, *args, **kwargs: args and isinstance(args[0], torch.Tensor) and args[0].device.type == 'mps')
     # MPS workaround for https://github.com/pytorch/pytorch/issues/90532
@@ -61,4 +65,4 @@ if has_mps:
     # MPS workaround for https://github.com/pytorch/pytorch/issues/92311
     if platform.processor() == 'i386':
        for funcName in ['torch.argmax', 'torch.Tensor.argmax']:
-            CondFunc(funcName, lambda _, input, *args, **kwargs: torch.max(input.float() if input.dtype == torch.int64 else input, *args, **kwargs)[1], lambda _, input, *args, **kwargs: input.device.type == 'mps')
\ No newline at end of file
+            CondFunc(funcName, lambda _, input, *args, **kwargs: torch.max(input.float() if input.dtype == torch.int64 else input, *args, **kwargs)[1], lambda _, input, *args, **kwargs: input.device.type == 'mps')
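A note on the version gate in the first hunk: packaging.version.parse implements PEP 440 ordering, so torch nightly builds (versions like 2.1.0.devYYYYMMDD) compare greater than 2.0.1 and take the torch.backends.mps branch, which is the case the new comment calls out. A minimal sketch of the comparison semantics, assuming only the packaging library (the nightly version string is a made-up example):

    from packaging import version

    # PEP 440-aware ordering, as used by check_for_mps()'s version gate
    assert version.parse("1.13.1") <= version.parse("2.0.1")             # legacy getattr/probe path
    assert version.parse("2.0.1") <= version.parse("2.0.1")              # gate is inclusive of 2.0.1
    assert version.parse("2.1.0.dev20230708") > version.parse("2.0.1")   # nightly: backends.mps path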
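CondFunc, used by every workaround below the availability check, comes from this repo's modules/sd_hijack_utils.py: given a dotted path to a function, a substitute, and a condition, it monkey-patches the target so the substitute (receiving the original function as its first argument) runs only when the condition holds. The sketch below is a hypothetical minimal re-implementation of that pattern for illustration; the name cond_patch, the path-resolution loop, and the error handling are assumptions, not the project's actual code:

    import functools
    import importlib


    def cond_patch(orig_func_path, sub_func, cond_func):
        """Patch the callable at a dotted path so sub_func(orig, ...) runs
        when cond_func(orig, ...) is true, else the original runs."""
        parts = orig_func_path.split('.')
        # import the longest importable module prefix, then walk the remaining
        # attributes (handles class paths such as 'torch.Tensor.to')
        owner = None
        for i in range(len(parts) - 1, 0, -1):
            try:
                owner = importlib.import_module('.'.join(parts[:i]))
                prefix_len = i
                break
            except ImportError:
                continue
        if owner is None:
            raise ImportError(f"cannot resolve {orig_func_path}")
        for name in parts[prefix_len:-1]:
            owner = getattr(owner, name)
        orig = getattr(owner, parts[-1])

        @functools.wraps(orig)
        def wrapper(*args, **kwargs):
            if cond_func(orig, *args, **kwargs):
                return sub_func(orig, *args, **kwargs)
            return orig(*args, **kwargs)

        setattr(owner, parts[-1], wrapper)
        return wrapper

Read the diff's argmax workaround against this pattern: the condition fires only for tensors on the mps device, and the substitute reroutes int64 inputs through torch.max(...), whose indices output (element [1] when a dim is given) matches argmax.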