path: root/extensions-builtin/Lora/lora.py
author    Karun <karun.ellango7@gmail.com>  2023-03-25 05:12:55 -0400
committer GitHub <noreply@github.com>       2023-03-25 05:12:55 -0400
commit    63a2f8d8225228a52b3ca7f19d2ba1fd07a6234b (patch)
tree      9a7c38070d83b409895704125525dfc70cc21215  /extensions-builtin/Lora/lora.py
parent    ca2b8faa83076a21dd14c974f03f88eb6da57485 (diff)
parent    70615448b2ef3285dba9bb1992974cb1eaf10995 (diff)
Merge branch 'master' into master
Diffstat (limited to 'extensions-builtin/Lora/lora.py')
-rw-r--r--  extensions-builtin/Lora/lora.py  22
1 file changed, 21 insertions(+), 1 deletion(-)
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index cb8f1d36..7c371deb 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -3,7 +3,9 @@ import os
import re
import torch
-from modules import shared, devices, sd_models
+from modules import shared, devices, sd_models, errors
+
+metadata_tags_order = {"ss_sd_model_name": 1, "ss_resolution": 2, "ss_clip_skip": 3, "ss_num_train_images": 10, "ss_tag_frequency": 20}
re_digits = re.compile(r"\d+")
re_unet_down_blocks = re.compile(r"lora_unet_down_blocks_(\d+)_attentions_(\d+)_(.+)")
@@ -43,6 +45,23 @@ class LoraOnDisk:
    def __init__(self, name, filename):
        self.name = name
        self.filename = filename
+        self.metadata = {}
+
+        _, ext = os.path.splitext(filename)
+        if ext.lower() == ".safetensors":
+            try:
+                self.metadata = sd_models.read_metadata_from_safetensors(filename)
+            except Exception as e:
+                errors.display(e, f"reading lora {filename}")
+
+        if self.metadata:
+            m = {}
+            for k, v in sorted(self.metadata.items(), key=lambda x: metadata_tags_order.get(x[0], 999)):
+                m[k] = v
+
+            self.metadata = m
+
+        self.ssmd_cover_images = self.metadata.pop('ssmd_cover_images', None)  # those are cover images and they are too big to display in UI as text
class LoraModule:
@@ -159,6 +178,7 @@ def load_loras(names, multipliers=None):
def lora_forward(module, input, res):
+    input = devices.cond_cast_unet(input)
    if len(loaded_loras) == 0:
        return res
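
For reference, a minimal standalone sketch of the metadata ordering introduced in the LoraOnDisk hunk above. The order_metadata helper and the example values are hypothetical illustrations; the tag priorities and the 999 fallback mirror the metadata_tags_order table added by this commit:

    # Same priority table as in the commit; unknown keys fall back to 999.
    metadata_tags_order = {"ss_sd_model_name": 1, "ss_resolution": 2, "ss_clip_skip": 3, "ss_num_train_images": 10, "ss_tag_frequency": 20}

    def order_metadata(metadata):
        # sorted() is stable, so keys with the same priority keep their
        # original relative order; known training tags float to the front.
        return {k: v for k, v in sorted(metadata.items(), key=lambda item: metadata_tags_order.get(item[0], 999))}

    # Hypothetical metadata already read from a .safetensors file:
    example = {"ss_tag_frequency": "{}", "custom_key": "x", "ss_sd_model_name": "v1-5-pruned"}
    print(list(order_metadata(example)))  # ['ss_sd_model_name', 'ss_tag_frequency', 'custom_key']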