aboutsummaryrefslogtreecommitdiff
path: root/extensions-builtin/Lora
diff options
context:
space:
mode:
authorAUTOMATIC1111 <16777216c@gmail.com>2023-09-30 09:48:38 +0300
committerGitHub <noreply@github.com>2023-09-30 09:48:38 +0300
commita0e979badba4eb64ae75ad8fdd1cecf7caa979ff (patch)
treebd22f9a0a76e105fafc4b39a0a2e180900c0ff60 /extensions-builtin/Lora
parent3aa9f01bdc04d3afc18d6a9cf1a3abb2ca9d9d83 (diff)
parent7d4d871d4679b5b78ff67b501da5367413542984 (diff)
Merge pull request #13178 from wpdong0727/fix-lora-bias-backup-reset
fix: lora-bias-backup don't reset cache
Diffstat (limited to 'extensions-builtin/Lora')
-rw-r--r--extensions-builtin/Lora/networks.py1
1 file changed, 1 insertion, 0 deletions
diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py
index 96f935b2..315682b3 100644
--- a/extensions-builtin/Lora/networks.py
+++ b/extensions-builtin/Lora/networks.py
@@ -418,6 +418,7 @@ def network_forward(module, input, original_forward):
def network_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
self.network_current_names = ()
self.network_weights_backup = None
+ self.network_bias_backup = None
def network_Linear_forward(self, input):