aboutsummaryrefslogtreecommitdiff
path: root/modules/initialize.py
diff options
context:
space:
mode:
authorwangshuai09 <391746016@qq.com>2024-01-27 17:21:32 +0800
committerwangshuai09 <391746016@qq.com>2024-01-29 19:25:06 +0800
commitec124607f47371a6cfd61a795f86a7f1cbd44651 (patch)
treed60205d2f58c80a0cc0bb8a079b9f33e7bc93f53 /modules/initialize.py
parentcf2772fab0af5573da775e7437e6acdca424f26e (diff)
Add NPU Support
Diffstat (limited to 'modules/initialize.py')
-rw-r--r--modules/initialize.py6
1 files changed, 5 insertions, 1 deletions
diff --git a/modules/initialize.py b/modules/initialize.py
index ac95fc6f..3285cc3c 100644
--- a/modules/initialize.py
+++ b/modules/initialize.py
@@ -143,13 +143,17 @@ def initialize_rest(*, reload_script_modules=False):
its optimization may be None because the list of optimizers has not been filled
by that time, so we apply optimization again.
"""
+ from modules import devices
+ # Workaround for a bug in torch_npu; revert once it is fixed, @see https://gitee.com/ascend/pytorch/issues/I8KECW?from=project-issue
+ if devices.npu_specific.has_npu:
+ import torch
+ torch.npu.set_device(0)
shared.sd_model # noqa: B018
if sd_hijack.current_optimizer is None:
sd_hijack.apply_optimizations()
- from modules import devices
devices.first_time_calculation()
if not shared.cmd_opts.skip_load_model_at_start:
Thread(target=load_model).start()