path: root/modules/sd_models.py
author Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com> 2023-12-02 22:06:47 +0800
committer Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com> 2023-12-02 22:06:47 +0800
commit 50a21cb09fe3e9ea2d4fe058e0484e192c8a86e3 (patch)
tree 58a7bb3327708fc9a7a7e76a5348c1cd71e3d251 /modules/sd_models.py
parent 110485d5bb511ab01ac3d890f1deca0502f4c7db (diff)
Ensure the cached weight will not be affected
Diffstat (limited to 'modules/sd_models.py')
-rw-r--r--  modules/sd_models.py  4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/modules/sd_models.py b/modules/sd_models.py
index 4b8a9ae6..dcf816b3 100644
--- a/modules/sd_models.py
+++ b/modules/sd_models.py
@@ -435,9 +435,9 @@ def load_model_weights(model, checkpoint_info: CheckpointInfo, state_dict, timer
         for module in model.modules():
             if isinstance(module, (torch.nn.Conv2d, torch.nn.Linear)):
                 if shared.opts.cache_fp16_weight:
-                    module.fp16_weight = module.weight.clone().half()
+                    module.fp16_weight = module.weight.data.clone().cpu().half()
                     if module.bias is not None:
-                        module.fp16_bias = module.bias.clone().half()
+                        module.fp16_bias = module.bias.data.clone().cpu().half()
                 module.to(torch.float8_e4m3fn)
         model.first_stage_model = first_stage
         timer.record("apply fp8")
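
Note: a minimal standalone sketch (not part of the patch) of what this change guards against. The fp16 copy is taken from the raw tensor with .data, cloned, and moved to CPU before the module is cast to float8, so the cached tensor shares no storage or device state with the converted weight. The variable names below are illustrative, and the snippet assumes a PyTorch build that supports torch.float8_e4m3fn, as the fp8 code path above already requires.

import torch

# Illustrative example module; the patch applies this to Conv2d/Linear modules.
linear = torch.nn.Linear(4, 4)

# .data drops the autograd wrapper, .clone() copies the storage, .cpu() keeps
# the copy in system RAM instead of VRAM, and .half() stores it as fp16, so
# the cached tensor is fully independent of the module's parameters.
cached_fp16_weight = linear.weight.data.clone().cpu().half()

# Casting the module to fp8 afterwards does not touch the cached copy.
linear.to(torch.float8_e4m3fn)

print(cached_fp16_weight.dtype)  # torch.float16
print(linear.weight.dtype)       # torch.float8_e4m3fn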