diff options
author | brkirch <brkirch@users.noreply.github.com> | 2023-05-08 19:49:43 +0000 |
---|---|---|
committer | brkirch <brkirch@users.noreply.github.com> | 2023-05-09 05:10:13 +0000 |
commit | 9efb809f7c2f6754367cafcce02926bf954815d5 (patch) | |
tree | ee74e92f04f215754598355ac60c7ce3be24827f | |
parent | 7aab389d6fc8ad08729071b1ed9d4de64c4e44db (diff) | |
download | stable-diffusion-webui-gfx803-9efb809f7c2f6754367cafcce02926bf954815d5.tar.gz stable-diffusion-webui-gfx803-9efb809f7c2f6754367cafcce02926bf954815d5.tar.bz2 stable-diffusion-webui-gfx803-9efb809f7c2f6754367cafcce02926bf954815d5.zip |
Remove PyTorch 2.0 check
Apparently the commit in the main branch of pytorch/pytorch that fixes this issue didn't make it into PyTorch 2.0.1, and since it is unclear exactly which release will have it we'll just always apply the workaround so a crash doesn't occur regardless.
-rw-r--r-- | modules/mac_specific.py | 4 |
1 file changed, 2 insertions, 2 deletions
diff --git a/modules/mac_specific.py b/modules/mac_specific.py
index 6fe8dea0..68bffec6 100644
--- a/modules/mac_specific.py
+++ b/modules/mac_specific.py
@@ -54,6 +54,6 @@ if has_mps:
         CondFunc('torch.cumsum', cumsum_fix_func, None)
         CondFunc('torch.Tensor.cumsum', cumsum_fix_func, None)
         CondFunc('torch.narrow', lambda orig_func, *args, **kwargs: orig_func(*args, **kwargs).clone(), None)
-        if version.parse(torch.__version__) == version.parse("2.0"):
+        # MPS workaround for https://github.com/pytorch/pytorch/issues/96113
-            CondFunc('torch.nn.functional.layer_norm', lambda orig_func, x, normalized_shape, weight, bias, eps, **kwargs: orig_func(x.float(), normalized_shape, weight.float() if weight is not None else None, bias.float() if bias is not None else bias, eps).to(x.dtype), lambda *args, **kwargs: len(args) == 6)
+        CondFunc('torch.nn.functional.layer_norm', lambda orig_func, x, normalized_shape, weight, bias, eps, **kwargs: orig_func(x.float(), normalized_shape, weight.float() if weight is not None else None, bias.float() if bias is not None else bias, eps).to(x.dtype), lambda _, input, *args, **kwargs: len(args) == 4 and input.device.type == 'mps')