From f741a98baccae100fcfb40c017b5c35c5cba1b0c Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Wed, 10 May 2023 08:43:42 +0300
Subject: imports cleanup for ruff

---
 extensions-builtin/Lora/lora.py | 1 -
 1 file changed, 1 deletion(-)

(limited to 'extensions-builtin/Lora/lora.py')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index ba1293df..0ab43229 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -1,4 +1,3 @@
-import glob
 import os
 import re
 import torch
--
cgit v1.2.1


From 028d3f6425d85f122027c127fba8bcbf4f66ee75 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Wed, 10 May 2023 11:05:02 +0300
Subject: ruff auto fixes

---
 extensions-builtin/Lora/lora.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

(limited to 'extensions-builtin/Lora/lora.py')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 0ab43229..9795540f 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -172,7 +172,7 @@ def load_lora(name, filename):
         else:
             print(f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}')
             continue
-        assert False, f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}'
+        raise AssertionError(f"Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}")

         with torch.no_grad():
             module.weight.copy_(weight)
@@ -184,7 +184,7 @@
         elif lora_key == "lora_down.weight":
             lora_module.down = module
         else:
-            assert False, f'Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha'
+            raise AssertionError(f"Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha")

     if len(keys_failed_to_match) > 0:
         print(f"Failed to match keys when loading Lora {filename}: {keys_failed_to_match}")
@@ -202,7 +202,7 @@ def load_loras(names, multipliers=None):
     loaded_loras.clear()

     loras_on_disk = [available_lora_aliases.get(name, None) for name in names]
-    if any([x is None for x in loras_on_disk]):
+    if any(x is None for x in loras_on_disk):
         list_available_loras()

         loras_on_disk = [available_lora_aliases.get(name, None) for name in names]
@@ -309,7 +309,7 @@ def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.Mu

             print(f'failed to calculate lora weights for layer {lora_layer_name}')

-    setattr(self, "lora_current_names", wanted_names)
+    self.lora_current_names = wanted_names


 def lora_forward(module, input, original_forward):
@@ -343,8 +343,8 @@ def lora_forward(module, input, original_forward):


 def lora_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
-    setattr(self, "lora_current_names", ())
-    setattr(self, "lora_weights_backup", None)
+    self.lora_current_names = ()
+    self.lora_weights_backup = None


 def lora_Linear_forward(self, input):
--
cgit v1.2.1


From a5121e7a0623db328a9462d340d389ed6737374a Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Wed, 10 May 2023 11:37:18 +0300
Subject: fixes for B007

---
 extensions-builtin/Lora/lora.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'extensions-builtin/Lora/lora.py')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 9795540f..7b56136f 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -418,7 +418,7 @@ def infotext_pasted(infotext, params):

     added = []

-    for k, v in params.items():
+    for k in params:
         if not k.startswith("AddNet Model "):
             continue

--
cgit v1.2.1
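The three lint commits above apply general-purpose patterns rather than anything Lora-specific: `assert False` becomes `raise AssertionError` because asserts are stripped under `python -O`, `any([...])` loses its inner list, and an unused loop variable is dropped. A minimal standalone sketch of the last two patterns, using hypothetical data rather than code from lora.py:

    params = {"AddNet Model 1": "foo", "Steps": "20"}  # hypothetical example data

    # C419-style fix: pass a generator to any() so it can stop at the first
    # match instead of building a full list first.
    has_missing = any(v is None for v in params.values())

    # B007-style fix: iterate over the dict directly when only the keys are
    # used, instead of `for k, v in params.items()` with v never read.
    addnet_keys = [k for k in params if k.startswith("AddNet Model ")]

    print(has_missing, addnet_keys)  # False ['AddNet Model 1']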
From 44c37f94e176667ccdfeb74916e4640fa9dc586d Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Thu, 18 May 2023 16:36:30 +0300
Subject: add messages about Loras that failed to load to UI

---
 extensions-builtin/Lora/lora.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

(limited to 'extensions-builtin/Lora/lora.py')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 1308c48b..fa57d466 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -3,7 +3,7 @@ import re
 import torch
 from typing import Union

-from modules import shared, devices, sd_models, errors, scripts
+from modules import shared, devices, sd_models, errors, scripts, sd_hijack

 metadata_tags_order = {"ss_sd_model_name": 1, "ss_resolution": 2, "ss_clip_skip": 3, "ss_num_train_images": 10, "ss_tag_frequency": 20}

@@ -211,6 +211,8 @@ def load_loras(names, multipliers=None):

         loras_on_disk = [available_lora_aliases.get(name, None) for name in names]

+    failed_to_load_loras = []
+
     for i, name in enumerate(names):
         lora = already_loaded.get(name, None)

@@ -224,12 +226,16 @@ def load_loras(names, multipliers=None):
                 continue

         if lora is None:
+            failed_to_load_loras.append(name)
             print(f"Couldn't find Lora with name {name}")
             continue

         lora.multiplier = multipliers[i] if multipliers else 1.0
         loaded_loras.append(lora)

+    if len(failed_to_load_loras) > 0:
+        sd_hijack.model_hijack.comments.append("Failed to find Loras: " + ", ".join(failed_to_load_loras))
+

 def lora_calc_updown(lora, module, target):
     with torch.no_grad():
--
cgit v1.2.1
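The commit above threads load failures through to the UI instead of only the console. A small sketch of the same collect-then-report pattern; the `comments` list stands in for sd_hijack.model_hijack.comments, which the webui surfaces in the UI, and the `available` table is hypothetical:

    comments = []                     # stand-in for sd_hijack.model_hijack.comments
    available = {"styleA": object()}  # hypothetical name -> Lora lookup

    def load_loras(names):
        failed_to_load = []
        for name in names:
            lora = available.get(name)
            if lora is None:
                failed_to_load.append(name)
                print(f"Couldn't find Lora with name {name}")
                continue
            # ... activate the found lora here ...
        if failed_to_load:
            comments.append("Failed to find Loras: " + ", ".join(failed_to_load))

    load_loras(["styleA", "styleB"])
    print(comments)  # ['Failed to find Loras: styleB']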
From 39ec4f06ffb2c26e1298b2c5d80874dc3fd693ac Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Fri, 19 May 2023 22:59:29 +0300
Subject: calculate hashes for Lora

add lora hashes to infotext

when pasting infotext, use infotext's lora hashes to find local loras for
entries whose hashes match loras the user has
---
 extensions-builtin/Lora/lora.py | 59 +++++++++++++++++++++++++++++++++--------
 1 file changed, 48 insertions(+), 11 deletions(-)

(limited to 'extensions-builtin/Lora/lora.py')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index fa57d466..eec14712 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -3,7 +3,7 @@ import re
 import torch
 from typing import Union

-from modules import shared, devices, sd_models, errors, scripts, sd_hijack
+from modules import shared, devices, sd_models, errors, scripts, sd_hijack, hashes

 metadata_tags_order = {"ss_sd_model_name": 1, "ss_resolution": 2, "ss_clip_skip": 3, "ss_num_train_images": 10, "ss_tag_frequency": 20}

@@ -76,9 +76,9 @@ class LoraOnDisk:
         self.name = name
         self.filename = filename
         self.metadata = {}
+        self.is_safetensors = os.path.splitext(filename)[1].lower() == ".safetensors"

-        _, ext = os.path.splitext(filename)
-        if ext.lower() == ".safetensors":
+        if self.is_safetensors:
             try:
                 self.metadata = sd_models.read_metadata_from_safetensors(filename)
             except Exception as e:
@@ -94,14 +94,43 @@
         self.ssmd_cover_images = self.metadata.pop('ssmd_cover_images', None)  # those are cover images and they are too big to display in UI as text
         self.alias = self.metadata.get('ss_output_name', self.name)
+        self.hash = None
+        self.shorthash = None
+        self.set_hash(
+            self.metadata.get('sshs_model_hash') or
+            hashes.sha256_from_cache(self.filename, "lora/" + self.name, use_addnet_hash=self.is_safetensors) or
+            ''
+        )
+
+    def set_hash(self, v):
+        self.hash = v
+        self.shorthash = self.hash[0:12]
+
+        if self.shorthash:
+            available_lora_hash_lookup[self.shorthash] = self
+
+    def read_hash(self):
+        if not self.hash:
+            self.set_hash(hashes.sha256(self.filename, "lora/" + self.name, use_addnet_hash=self.is_safetensors) or '')
+
+    def get_alias(self):
+        if shared.opts.lora_preferred_name == "Filename" or self.alias.lower() in forbidden_lora_aliases:
+            return self.name
+        else:
+            return self.alias
+

 class LoraModule:
-    def __init__(self, name):
+    def __init__(self, name, lora_on_disk: LoraOnDisk):
         self.name = name
+        self.lora_on_disk = lora_on_disk
         self.multiplier = 1.0
         self.modules = {}
         self.mtime = None

+        self.mentioned_name = None
+        """the text that was used to add lora to prompt - can be either name or an alias"""
+

 class LoraUpDownModule:
     def __init__(self):
@@ -126,11 +155,11 @@ def assign_lora_names_to_compvis_modules(sd_model):
     sd_model.lora_layer_mapping = lora_layer_mapping


-def load_lora(name, filename):
-    lora = LoraModule(name)
-    lora.mtime = os.path.getmtime(filename)
+def load_lora(name, lora_on_disk):
+    lora = LoraModule(name, lora_on_disk)
+    lora.mtime = os.path.getmtime(lora_on_disk.filename)

-    sd = sd_models.read_state_dict(filename)
+    sd = sd_models.read_state_dict(lora_on_disk.filename)

     # this should not be needed but is here as an emergency fix for an unknown error people are experiencing in 1.2.0
     if not hasattr(shared.sd_model, 'lora_layer_mapping'):
@@ -191,7 +220,7 @@
             raise AssertionError(f"Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha")

     if len(keys_failed_to_match) > 0:
-        print(f"Failed to match keys when loading Lora {filename}: {keys_failed_to_match}")
+        print(f"Failed to match keys when loading Lora {lora_on_disk.filename}: {keys_failed_to_match}")

     return lora

@@ -217,14 +246,19 @@ def load_loras(names, multipliers=None):
         lora = already_loaded.get(name, None)
         lora_on_disk = loras_on_disk[i]
+
         if lora_on_disk is not None:
             if lora is None or os.path.getmtime(lora_on_disk.filename) > lora.mtime:
                 try:
-                    lora = load_lora(name, lora_on_disk.filename)
+                    lora = load_lora(name, lora_on_disk)
                 except Exception as e:
                     errors.display(e, f"loading Lora {lora_on_disk.filename}")
                     continue

+            lora.mentioned_name = name
+
+            lora_on_disk.read_hash()
+
         if lora is None:
             failed_to_load_loras.append(name)
             print(f"Couldn't find Lora with name {name}")
             continue
@@ -403,7 +437,8 @@ def list_available_loras():
     available_loras.clear()
     available_lora_aliases.clear()
     forbidden_lora_aliases.clear()
-    forbidden_lora_aliases.update({"none": 1})
+    available_lora_hash_lookup.clear()
+    forbidden_lora_aliases.update({"none": 1, "Addams": 1})

     os.makedirs(shared.cmd_opts.lora_dir, exist_ok=True)

@@ -457,8 +492,10 @@
     if added:
         params["Prompt"] += "\n" + "".join(added)

+
 available_loras = {}
 available_lora_aliases = {}
+available_lora_hash_lookup = {}
 forbidden_lora_aliases = {}
 loaded_loras = []
--
cgit v1.2.1
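A note on the hashing commit above: hashes.sha256_from_cache and hashes.sha256 come from the webui's modules/hashes, which caches digests under a label such as "lora/" + name; with use_addnet_hash=True the digest is meant to match what the kohya-ss additional-networks extension records (hence the sshs_model_hash metadata key being checked first), by hashing only the tensor payload of a .safetensors file. The sketch below assumes the safetensors layout of an 8-byte little-endian header length followed by a JSON header; it is an approximation for illustration, not the webui's implementation:

    import hashlib
    import struct

    def addnet_hash_safetensors(path, blksize=1024 * 1024):
        """sha256 over everything after the safetensors JSON header."""
        h = hashlib.sha256()
        with open(path, "rb") as f:
            header_size = struct.unpack("<Q", f.read(8))[0]  # first 8 bytes: header length
            f.seek(8 + header_size)  # skip the JSON header, hash tensor data only
            while chunk := f.read(blksize):
                h.update(chunk)
        return h.hexdigest()

    full_hash = addnet_hash_safetensors("example.safetensors")  # hypothetical file
    shorthash = full_hash[0:12]  # 12-char prefix, as set_hash() above uses for lookup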