 extensions-builtin/Lora/network.py  | 6 +++---
 extensions-builtin/Lora/networks.py | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/extensions-builtin/Lora/network.py b/extensions-builtin/Lora/network.py
index b7b89061..d8e8dfb7 100644
--- a/extensions-builtin/Lora/network.py
+++ b/extensions-builtin/Lora/network.py
@@ -145,10 +145,10 @@ class NetworkModule:
         if orig_weight.size().numel() == updown.size().numel():
             updown = updown.reshape(orig_weight.shape)

-        if ex_bias is None:
-            ex_bias = 0
+        if ex_bias is not None:
+            ex_bias = ex_bias * self.multiplier()

-        return updown * self.calc_scale() * self.multiplier(), ex_bias * self.multiplier()
+        return updown * self.calc_scale() * self.multiplier(), ex_bias

     def calc_updown(self, target):
         raise NotImplementedError()
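For context, a minimal sketch of the contract this first hunk establishes: `finalize_updown` now leaves `ex_bias` as `None` when there is no bias delta, instead of coercing it to the scalar `0` and multiplying. The class below is a hypothetical stand-in, not the real `NetworkModule`; `multiplier()` and `calc_scale()` are assumed to return plain floats as they do in `Lora/network.py`.

```python
import torch

# Hypothetical stand-in for NetworkModule, reduced to the patched method.
class NetworkModuleSketch:
    def __init__(self, multiplier=0.5, scale=2.0):
        self._multiplier = multiplier  # assumed float, as in the real module
        self._scale = scale

    def multiplier(self):
        return self._multiplier

    def calc_scale(self):
        return self._scale

    def finalize_updown(self, updown, orig_weight, ex_bias=None):
        if orig_weight.size().numel() == updown.size().numel():
            updown = updown.reshape(orig_weight.shape)

        # Patched behavior: a missing bias delta stays None instead of
        # becoming the scalar 0, so callers can skip the bias update.
        if ex_bias is not None:
            ex_bias = ex_bias * self.multiplier()

        return updown * self.calc_scale() * self.multiplier(), ex_bias


m = NetworkModuleSketch()
w = torch.zeros(4)
delta, bias_delta = m.finalize_updown(torch.ones(4), w)
assert bias_delta is None  # before the fix this was 0 * multiplier()
```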
diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py
index 74cefe43..ba621139 100644
--- a/extensions-builtin/Lora/networks.py
+++ b/extensions-builtin/Lora/networks.py
@@ -322,7 +322,7 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
                         updown = torch.nn.functional.pad(updown, (0, 0, 0, 0, 0, 5))

                 self.weight += updown
-                if getattr(self, 'bias', None) is not None:
+                if ex_bias is not None and getattr(self, 'bias', None) is not None:
                     self.bias += ex_bias

             continue
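The second hunk is what makes the producer change safe: once `finalize_updown` can return `None` for the bias delta, `self.bias += ex_bias` must be guarded, or it would raise a `TypeError` when adding `None` to a tensor. Below is a hedged sketch of that guard, with a hypothetical free function standing in for the patched in-line code; the `torch.no_grad()` context reflects that the real code mutates parameters in place.

```python
import torch

def apply_bias_delta(module: torch.nn.Module, ex_bias):
    # Skip entirely when there is no delta or the layer has no bias;
    # unguarded, `module.bias += None` would raise a TypeError.
    if ex_bias is not None and getattr(module, 'bias', None) is not None:
        with torch.no_grad():  # bias is a leaf Parameter; in-place update needs no_grad
            module.bias += ex_bias


linear = torch.nn.Linear(4, 4)
apply_bias_delta(linear, None)            # no-op: nothing to add
apply_bias_delta(linear, torch.ones(4))   # adds the delta in place
```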