From f741a98baccae100fcfb40c017b5c35c5cba1b0c Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Wed, 10 May 2023 08:43:42 +0300
Subject: imports cleanup for ruff

---
 extensions-builtin/Lora/lora.py | 1 -
 1 file changed, 1 deletion(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index ba1293df..0ab43229 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -1,4 +1,3 @@
-import glob
 import os
 import re
 import torch
--
cgit v1.2.1


From 028d3f6425d85f122027c127fba8bcbf4f66ee75 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Wed, 10 May 2023 11:05:02 +0300
Subject: ruff auto fixes

---
 extensions-builtin/Lora/lora.py                | 12 ++++++------
 extensions-builtin/Lora/scripts/lora_script.py |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 0ab43229..9795540f 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -172,7 +172,7 @@ def load_lora(name, filename):
         else:
             print(f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}')
             continue
-            assert False, f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}'
+            raise AssertionError(f"Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}")
 
         with torch.no_grad():
             module.weight.copy_(weight)
@@ -184,7 +184,7 @@
         elif lora_key == "lora_down.weight":
             lora_module.down = module
         else:
-            assert False, f'Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha'
+            raise AssertionError(f"Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha")
 
     if len(keys_failed_to_match) > 0:
         print(f"Failed to match keys when loading Lora {filename}: {keys_failed_to_match}")
@@ -202,7 +202,7 @@ def load_loras(names, multipliers=None):
     loaded_loras.clear()
 
     loras_on_disk = [available_lora_aliases.get(name, None) for name in names]
-    if any([x is None for x in loras_on_disk]):
+    if any(x is None for x in loras_on_disk):
         list_available_loras()
 
         loras_on_disk = [available_lora_aliases.get(name, None) for name in names]
@@ -309,7 +309,7 @@ def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.Mu
 
             print(f'failed to calculate lora weights for layer {lora_layer_name}')
 
-    setattr(self, "lora_current_names", wanted_names)
+    self.lora_current_names = wanted_names
 
 
 def lora_forward(module, input, original_forward):
@@ -343,8 +343,8 @@ def lora_forward(module, input, original_forward):
 
 
 def lora_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
-    setattr(self, "lora_current_names", ())
-    setattr(self, "lora_weights_backup", None)
+    self.lora_current_names = ()
+    self.lora_weights_backup = None
 
 
 def lora_Linear_forward(self, input):
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 7db971fd..b70e2de7 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -53,7 +53,7 @@ script_callbacks.on_infotext_pasted(lora.infotext_pasted)
 
 
 shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
-    "sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": ["None"] + [x for x in lora.available_loras]}, refresh=lora.list_available_loras),
+    "sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": ["None"] + list(lora.available_loras)}, refresh=lora.list_available_loras),
 }))
 
 
--
cgit v1.2.1
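
A note on the autofixes above: the assert-to-raise rewrite corresponds to flake8-bugbear rule B011 as implemented in ruff. The practical difference is that assert statements are stripped entirely when Python runs with the -O flag, so an "assert False" guard can silently become a no-op, while a raised AssertionError fires unconditionally; the any([...]) change likewise drops an intermediate list so any() can short-circuit. A minimal illustrative sketch (the function names below are made up for the example, not webui code):

    # B011: "assert False" vs "raise AssertionError" under "python -O".
    def guard_with_assert(value):
        assert value is not None, "value must be set"   # deleted by -O; silently passes
        return value

    def guard_with_raise(value):
        if value is None:
            raise AssertionError("value must be set")   # survives any optimization flag
        return value
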
From a5121e7a0623db328a9462d340d389ed6737374a Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Wed, 10 May 2023 11:37:18 +0300
Subject: fixes for B007

---
 extensions-builtin/Lora/lora.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 9795540f..7b56136f 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -418,7 +418,7 @@ def infotext_pasted(infotext, params):
 
     added = []
 
-    for k, v in params.items():
+    for k in params:
         if not k.startswith("AddNet Model "):
             continue
 
--
cgit v1.2.1


From 3ec7b705c78b7aca9569c92a419837352c7a4ec6 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Wed, 10 May 2023 21:21:32 +0300
Subject: suggestions and fixes from the PR

---
 extensions-builtin/Lora/scripts/lora_script.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index b70e2de7..13d297d7 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -53,7 +53,7 @@ script_callbacks.on_infotext_pasted(lora.infotext_pasted)
 
 
 shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
-    "sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": ["None"] + list(lora.available_loras)}, refresh=lora.list_available_loras),
+    "sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": ["None", *lora.available_loras]}, refresh=lora.list_available_loras),
 }))
--
cgit v1.2.1
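
The ["None", *lora.available_loras] spelling adopted above relies on iterable unpacking inside a list display; iterating a dict yields its keys in insertion order, so all three historical spellings of this choices list produce the same result. A small sketch with hypothetical lora names standing in for lora.available_loras:

    # Hypothetical stand-in for lora.available_loras (name -> entry mapping).
    available_loras = {"add_detail": object(), "film_grain": object()}

    v1 = ["None"] + [x for x in available_loras]  # original comprehension
    v2 = ["None"] + list(available_loras)         # ruff auto fix
    v3 = ["None", *available_loras]               # form suggested in PR review

    assert v1 == v2 == v3 == ["None", "add_detail", "film_grain"]
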
From 477199357f4f5f02d62857a0cf432a3ed19e6418 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sat, 13 May 2023 20:15:37 +0300
Subject: add an option to always refer to lora by filenames

never refer to lora by an alias if multiple loras have same alias or the alias is called none

---
 extensions-builtin/Lora/lora.py                   | 6 ++++++
 extensions-builtin/Lora/scripts/lora_script.py    | 1 +
 extensions-builtin/Lora/ui_extra_networks_lora.py | 8 +++++++-
 3 files changed, 14 insertions(+), 1 deletion(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index ba1293df..6fa80006 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -393,6 +393,8 @@ def lora_MultiheadAttention_load_state_dict(self, *args, **kwargs):
 def list_available_loras():
     available_loras.clear()
     available_lora_aliases.clear()
+    forbidden_lora_aliases.clear()
+    forbidden_lora_aliases.update({"none": 1})
 
     os.makedirs(shared.cmd_opts.lora_dir, exist_ok=True)
 
@@ -406,6 +408,9 @@ def list_available_loras():
 
         available_loras[name] = entry
 
+        if entry.alias in available_lora_aliases:
+            forbidden_lora_aliases[entry.alias.lower()] = 1
+
         available_lora_aliases[name] = entry
         available_lora_aliases[entry.alias] = entry
 
@@ -445,6 +450,7 @@ def infotext_pasted(infotext, params):
 
 available_loras = {}
 available_lora_aliases = {}
+forbidden_lora_aliases = {}
 loaded_loras = []
 
 list_available_loras()
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 7db971fd..060bda05 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -54,6 +54,7 @@ script_callbacks.on_infotext_pasted(lora.infotext_pasted)
 
 shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
     "sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": ["None"] + [x for x in lora.available_loras]}, refresh=lora.list_available_loras),
+    "lora_preferred_name": shared.OptionInfo("Alias from file", "When adding to prompt, refer to lora by", gr.Radio, {"choices": ["Alias from file", "Filename"]}),
 }))
 
 
diff --git a/extensions-builtin/Lora/ui_extra_networks_lora.py b/extensions-builtin/Lora/ui_extra_networks_lora.py
index a0edbc1e..2050e3fa 100644
--- a/extensions-builtin/Lora/ui_extra_networks_lora.py
+++ b/extensions-builtin/Lora/ui_extra_networks_lora.py
@@ -15,13 +15,19 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
     def list_items(self):
         for name, lora_on_disk in lora.available_loras.items():
             path, ext = os.path.splitext(lora_on_disk.filename)
+
+            if shared.opts.lora_preferred_name == "Filename" or lora_on_disk.alias.lower() in lora.forbidden_lora_aliases:
+                alias = name
+            else:
+                alias = lora_on_disk.alias
+
             yield {
                 "name": name,
                 "filename": path,
                 "preview": self.find_preview(path),
                 "description": self.find_description(path),
                 "search_term": self.search_terms_from_path(lora_on_disk.filename),
-                "prompt": json.dumps(f"<lora:{name}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
+                "prompt": json.dumps(f"<lora:{alias}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
                 "local_preview": f"{path}.{shared.opts.samples_format}",
                 "metadata": json.dumps(lora_on_disk.metadata, indent=4) if lora_on_disk.metadata else None,
             }
--
cgit v1.2.1


From b9abdb50a3684e2f7392bf68736ad1ec025045af Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sun, 14 May 2023 13:31:03 +0300
Subject: add a possible fix for 'LatentDiffusion' object has no attribute 'lora_layer_mapping'

---
 extensions-builtin/Lora/lora.py | 4 ++++
 1 file changed, 4 insertions(+)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 6fa80006..b5d0c98f 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -133,6 +133,10 @@ def load_lora(name, filename):
 
     sd = sd_models.read_state_dict(filename)
 
+    # this should not be needed but is here as an emergency fix for an unknown error people are experiencing in 1.2.0
+    if not hasattr(shared.sd_model, 'lora_layer_mapping'):
+        assign_lora_names_to_compvis_modules(shared.sd_model)
+
     keys_failed_to_match = {}
     is_sd2 = 'model_transformer_resblocks' in shared.sd_model.lora_layer_mapping
--
cgit v1.2.1
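
Condensing the alias-handling commit above into one runnable sketch (the data and the prompt_name helper below are hypothetical stand-ins; the real logic lives in list_available_loras and ExtraNetworksPageLora.list_items): an alias becomes forbidden when it is "none" or when two files on disk declare the same alias, and the UI then falls back to the filename-derived name.

    # Hypothetical on-disk loras: (filename-derived name, alias from metadata).
    entries = [("style_a_v1", "MyStyle"), ("style_b_v2", "MyStyle"), ("misc", "none")]

    available_lora_aliases = {}
    forbidden_lora_aliases = {"none": 1}  # presumably reserved so it cannot clash with the "None" choice

    for name, alias in entries:
        if alias in available_lora_aliases:
            forbidden_lora_aliases[alias.lower()] = 1  # duplicate alias -> unusable
        available_lora_aliases[name] = name
        available_lora_aliases[alias] = name

    def prompt_name(name, alias, preferred_name="Alias from file"):
        """Stand-in for the choice made in list_items()."""
        if preferred_name == "Filename" or alias.lower() in forbidden_lora_aliases:
            return name
        return alias

    assert prompt_name("style_a_v1", "MyStyle") == "style_a_v1"  # shared alias: filename wins
    assert prompt_name("misc", "none") == "misc"                 # "none" is always forbidden
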