author    AUTOMATIC1111 <16777216c@gmail.com>    2023-08-19 08:36:20 +0300
committer GitHub <noreply@github.com>            2023-08-19 08:36:20 +0300
commit    448d6bef372079cbd6d5a3acd8fbfd6f03799ee3 (patch)
tree      46741eb48d9588db94f56d134f8af952b71bd514 /extensions-builtin/Lora/networks.py
parent    7056fdf2bee50e5952cc0bac2047e96de336a36a (diff)
parent    0dc74545c0b5510911757ed9f2be703aab58f014 (diff)
Merge pull request #12599 from AUTOMATIC1111/ram_optim
RAM optimization round 2
Diffstat (limited to 'extensions-builtin/Lora/networks.py')
-rw-r--r--  extensions-builtin/Lora/networks.py  5  ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py
index 9fca36b6..96f935b2 100644
--- a/extensions-builtin/Lora/networks.py
+++ b/extensions-builtin/Lora/networks.py
@@ -304,7 +304,10 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
     wanted_names = tuple((x.name, x.te_multiplier, x.unet_multiplier, x.dyn_dim) for x in loaded_networks)
 
     weights_backup = getattr(self, "network_weights_backup", None)
-    if weights_backup is None:
+    if weights_backup is None and wanted_names != ():
+        if current_names != ():
+            raise RuntimeError("no backup weights found and current weights are not unchanged")
+
         if isinstance(self, torch.nn.MultiheadAttention):
             weights_backup = (self.in_proj_weight.to(devices.cpu, copy=True), self.out_proj.weight.to(devices.cpu, copy=True))
         else:
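
For context: the hunk above makes the CPU-side weight backup conditional. A backup is now taken only when at least one network is wanted for this module, and if the module's weights were already modified (current_names non-empty, set earlier in networks.py) while no backup exists, the code raises instead of silently treating the modified weights as originals. Below is a minimal sketch of that pattern for a plain torch.nn.Linear; loaded_networks and the cpu device are stand-ins for the module-level state and devices.cpu used in the real file, and apply_weights_sketch is a hypothetical name, not the actual function.

import torch

# Stand-ins (assumptions): networks.py keeps equivalents at module level.
loaded_networks = []            # networks currently selected for application
cpu = torch.device("cpu")       # the real code uses devices.cpu

def apply_weights_sketch(self: torch.nn.Linear):
    # Names of networks applied to this module so far, and names wanted now.
    current_names = getattr(self, "network_current_names", ())
    wanted_names = tuple(x.name for x in loaded_networks)

    weights_backup = getattr(self, "network_weights_backup", None)
    if weights_backup is None and wanted_names != ():
        # RAM optimization: copy the original weights to CPU only when a
        # network is actually about to modify this module.
        if current_names != ():
            # Weights were changed earlier but no backup survives; the
            # originals cannot be recovered, so fail loudly.
            raise RuntimeError("no backup weights found and current weights are not unchanged")

        weights_backup = self.weight.to(cpu, copy=True)
        self.network_weights_backup = weights_backup

The saving implied by the merge title comes from modules that no network ever touches: they now skip the backup entirely instead of holding a CPU copy of their weights for the whole session.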