author     AUTOMATIC <16777216c@gmail.com>  2023-01-23 18:12:51 +0300
committer  AUTOMATIC <16777216c@gmail.com>  2023-01-23 18:12:51 +0300
commit     e407d1af897a7896d8c81e32dc86e7eb753ce207 (patch)
tree       e97a2b2bb79d0a458f6c9639a3e577c09f66c3c2 /extensions-builtin/Lora/lora.py
parent     e8c3d03f7d9966b81458944efb25666b2143153f (diff)
add support for loras trained on kohya's scripts 0.4.0 (alphas)
Diffstat (limited to 'extensions-builtin/Lora/lora.py')
-rw-r--r--   extensions-builtin/Lora/lora.py   18
1 file changed, 11 insertions, 7 deletions
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index da1797dc..220e64ff 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -92,6 +92,15 @@ def load_lora(name, filename):
             keys_failed_to_match.append(key_diffusers)
             continue
 
+        lora_module = lora.modules.get(key, None)
+        if lora_module is None:
+            lora_module = LoraUpDownModule()
+            lora.modules[key] = lora_module
+
+        if lora_key == "alpha":
+            lora_module.alpha = weight.item()
+            continue
+
         if type(sd_module) == torch.nn.Linear:
             module = torch.nn.Linear(weight.shape[1], weight.shape[0], bias=False)
         elif type(sd_module) == torch.nn.Conv2d:
@@ -104,17 +113,12 @@ def load_lora(name, filename):
 
         module.to(device=devices.device, dtype=devices.dtype)
 
-        lora_module = lora.modules.get(key, None)
-        if lora_module is None:
-            lora_module = LoraUpDownModule()
-            lora.modules[key] = lora_module
-
         if lora_key == "lora_up.weight":
             lora_module.up = module
         elif lora_key == "lora_down.weight":
             lora_module.down = module
         else:
-            assert False, f'Bad Lora layer name: {key_diffusers} - must end in lora_up.weight or lora_down.weight'
+            assert False, f'Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha'
 
     if len(keys_failed_to_match) > 0:
         print(f"Failed to match keys when loading Lora {filename}: {keys_failed_to_match}")
@@ -161,7 +165,7 @@ def lora_forward(module, input, res):
     for lora in loaded_loras:
         module = lora.modules.get(lora_layer_name, None)
         if module is not None:
-            res = res + module.up(module.down(input)) * lora.multiplier
+            res = res + module.up(module.down(input)) * lora.multiplier * module.alpha / module.up.weight.shape[1]
 
     return res
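
The scaling this patch introduces can be reproduced in isolation. Below is a minimal sketch assuming a kohya-style checkpoint layout; the names (rank, d_in, d_out, multiplier, the stand-in res) are illustrative and not part of this repository. The only piece taken from the patch is the update rule itself, in which module.up.weight.shape[1] is the LoRA rank:

import torch

rank, d_in, d_out = 4, 8, 8

# kohya's scripts 0.4.0+ store three tensors per targeted layer:
#   <key>.lora_down.weight -> [rank, d_in]
#   <key>.lora_up.weight   -> [d_out, rank]
#   <key>.alpha            -> 0-dim tensor, read with .item() as in load_lora
sd = {
    "lora_down.weight": torch.randn(rank, d_in),
    "lora_up.weight": torch.randn(d_out, rank),
    "alpha": torch.tensor(1.0),
}

down = torch.nn.Linear(d_in, rank, bias=False)
up = torch.nn.Linear(rank, d_out, bias=False)
with torch.no_grad():
    down.weight.copy_(sd["lora_down.weight"])
    up.weight.copy_(sd["lora_up.weight"])

# Pre-0.4.0 checkpoints have no alpha key; falling back to the rank
# (an effective scale of 1.0) is an assumption of this sketch, not
# something the patch above does.
alpha = sd["alpha"].item() if "alpha" in sd else up.weight.shape[1]

multiplier = 1.0
x = torch.randn(1, d_in)
res = torch.zeros(1, d_out)  # stand-in for the frozen layer's output

# up.weight.shape[1] == rank, so the LoRA term is scaled by
# multiplier * alpha / rank, matching the new line in lora_forward.
res = res + up(down(x)) * multiplier * alpha / up.weight.shape[1]
print(res.shape)  # torch.Size([1, 8])

Dividing by the rank follows the LoRA paper's alpha/r convention: a network trained at a different rank but the same alpha contributes at a comparable magnitude, which is why the alpha stored per module by kohya's scripts has to be read out in load_lora rather than hard-coded.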