From 7fa5ee54b15904bef6598800df76ba1291d44ec6 Mon Sep 17 00:00:00 2001
From: catboxanon <122327233+catboxanon@users.noreply.github.com>
Date: Sun, 13 Aug 2023 02:32:54 -0400
Subject: Support search and display of hashes for all extra network items

---
 extensions-builtin/Lora/ui_extra_networks_lora.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/ui_extra_networks_lora.py b/extensions-builtin/Lora/ui_extra_networks_lora.py
index 3629e5c0..55409a78 100644
--- a/extensions-builtin/Lora/ui_extra_networks_lora.py
+++ b/extensions-builtin/Lora/ui_extra_networks_lora.py
@@ -25,9 +25,10 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
         item = {
             "name": name,
             "filename": lora_on_disk.filename,
+            "shorthash": lora_on_disk.shorthash,
             "preview": self.find_preview(path),
             "description": self.find_description(path),
-            "search_term": self.search_terms_from_path(lora_on_disk.filename),
+            "search_term": self.search_terms_from_path(lora_on_disk.filename) + " " + (lora_on_disk.hash or ""),
             "local_preview": f"{path}.{shared.opts.samples_format}",
             "metadata": lora_on_disk.metadata,
             "sort_keys": {'default': index, **self.get_sort_keys(lora_on_disk.filename)},
-- 
cgit v1.2.1
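Note: the commit above appends each file's hash to the card's "search_term", so typing a hash into the extra networks search box now filters to the matching item, and "shorthash" lets the UI display it. A minimal sketch of the search-term idea follows; search_terms_from_path is simplified here, and the path and hash values are made up for illustration.

    import os

    def search_terms_from_path(filename):
        # Simplified stand-in for the webui helper: expose the file's
        # relative path (minus extension) as searchable text.
        return os.path.splitext(filename)[0].replace("\\", "/")

    lora_hash = "aa11bb22"  # hypothetical short hash; real ones come from the hash cache
    search_term = search_terms_from_path("models/Lora/styles/paper_cut.safetensors") + " " + (lora_hash or "")

    # The card now matches a query by path fragment or by hash:
    for query in ("paper_cut", "aa11bb22"):
        assert query in search_term
    print(search_term)  # models/Lora/styles/paper_cut aa11bb22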
From d8419762c1454ba51baa710d9ce8e762efc056ef Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Sun, 13 Aug 2023 15:07:37 +0300
Subject: Lora: output warnings in UI rather than fail for unfitting loras;
 switch to logging for error output in console

---
 extensions-builtin/Lora/extra_networks_lora.py | 12 ++++-
 extensions-builtin/Lora/networks.py            | 61 ++++++++++++++++----------
 extensions-builtin/Lora/scripts/lora_script.py |  6 +--
 3 files changed, 50 insertions(+), 29 deletions(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/extra_networks_lora.py b/extensions-builtin/Lora/extra_networks_lora.py
index ba2945c6..32e32cab 100644
--- a/extensions-builtin/Lora/extra_networks_lora.py
+++ b/extensions-builtin/Lora/extra_networks_lora.py
@@ -1,4 +1,4 @@
-from modules import extra_networks, shared
+from modules import extra_networks, shared, sd_hijack
 import networks
 
 
@@ -6,9 +6,14 @@ class ExtraNetworkLora(extra_networks.ExtraNetwork):
     def __init__(self):
         super().__init__('lora')
 
+        self.errors = {}
+        """mapping of network names to the number of errors the network had during operation"""
+
     def activate(self, p, params_list):
         additional = shared.opts.sd_lora
 
+        self.errors.clear()
+
         if additional != "None" and additional in networks.available_networks and not any(x for x in params_list if x.items[0] == additional):
             p.all_prompts = [x + f"<lora:{additional}:{shared.opts.extra_networks_default_multiplier}>" for x in p.all_prompts]
             params_list.append(extra_networks.ExtraNetworkParams(items=[additional, shared.opts.extra_networks_default_multiplier]))
@@ -56,4 +61,7 @@ class ExtraNetworkLora(extra_networks.ExtraNetwork):
             p.extra_generation_params["Lora hashes"] = ", ".join(network_hashes)
 
     def deactivate(self, p):
-        pass
+        if self.errors:
+            p.comment("Networks with errors: " + ", ".join(f"{k} ({v})" for k, v in self.errors.items()))
+
+        self.errors.clear()
diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py
index ba621139..c252ed9e 100644
--- a/extensions-builtin/Lora/networks.py
+++ b/extensions-builtin/Lora/networks.py
@@ -1,3 +1,4 @@
+import logging
 import os
 import re
 
@@ -194,7 +195,7 @@ def load_network(name, network_on_disk):
         net.modules[key] = net_module
 
     if keys_failed_to_match:
-        print(f"Failed to match keys when loading network {network_on_disk.filename}: {keys_failed_to_match}")
+        logging.debug(f"Network {network_on_disk.filename} didn't match keys: {keys_failed_to_match}")
 
     return net
 
@@ -207,7 +208,6 @@ def purge_networks_from_memory():
     devices.torch_gc()
 
 
-
 def load_networks(names, te_multipliers=None, unet_multipliers=None, dyn_dims=None):
     already_loaded = {}
 
@@ -248,7 +248,7 @@ def load_networks(names, te_multipliers=None, unet_multipliers=None, dyn_dims=No
 
         if net is None:
             failed_to_load_networks.append(name)
-            print(f"Couldn't find network with name {name}")
+            logging.info(f"Couldn't find network with name {name}")
             continue
 
         net.te_multiplier = te_multipliers[i] if te_multipliers else 1.0
@@ -257,7 +257,7 @@ def load_networks(names, te_multipliers=None, unet_multipliers=None, dyn_dims=No
         loaded_networks.append(net)
 
     if failed_to_load_networks:
-        sd_hijack.model_hijack.comments.append("Failed to find networks: " + ", ".join(failed_to_load_networks))
+        sd_hijack.model_hijack.comments.append("Networks not found: " + ", ".join(failed_to_load_networks))
 
     purge_networks_from_memory()
 
@@ -314,17 +314,22 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
     for net in loaded_networks:
         module = net.modules.get(network_layer_name, None)
         if module is not None and hasattr(self, 'weight'):
-            with torch.no_grad():
-                updown, ex_bias = module.calc_updown(self.weight)
+            try:
+                with torch.no_grad():
+                    updown, ex_bias = module.calc_updown(self.weight)
 
-            if len(self.weight.shape) == 4 and self.weight.shape[1] == 9:
-                # inpainting model. zero pad updown to make channel[1] 4 to 9
-                updown = torch.nn.functional.pad(updown, (0, 0, 0, 0, 0, 5))
+                if len(self.weight.shape) == 4 and self.weight.shape[1] == 9:
+                    # inpainting model. zero pad updown to make channel[1] 4 to 9
+                    updown = torch.nn.functional.pad(updown, (0, 0, 0, 0, 0, 5))
 
-            self.weight += updown
-            if ex_bias is not None and getattr(self, 'bias', None) is not None:
-                self.bias += ex_bias
-            continue
+                self.weight += updown
+                if ex_bias is not None and getattr(self, 'bias', None) is not None:
+                    self.bias += ex_bias
+            except RuntimeError as e:
+                logging.debug(f"Network {net.name} layer {network_layer_name}: {e}")
+                extra_network_lora.errors[net.name] = extra_network_lora.errors.get(net.name, 0) + 1
+
+            continue
 
         module_q = net.modules.get(network_layer_name + "_q_proj", None)
         module_k = net.modules.get(network_layer_name + "_k_proj", None)
@@ -332,21 +337,28 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
         module_out = net.modules.get(network_layer_name + "_out_proj", None)
 
         if isinstance(self, torch.nn.MultiheadAttention) and module_q and module_k and module_v and module_out:
-            with torch.no_grad():
-                updown_q = module_q.calc_updown(self.in_proj_weight)
-                updown_k = module_k.calc_updown(self.in_proj_weight)
-                updown_v = module_v.calc_updown(self.in_proj_weight)
-                updown_qkv = torch.vstack([updown_q, updown_k, updown_v])
-                updown_out = module_out.calc_updown(self.out_proj.weight)
-
-            self.in_proj_weight += updown_qkv
-            self.out_proj.weight += updown_out
-            continue
+            try:
+                with torch.no_grad():
+                    updown_q = module_q.calc_updown(self.in_proj_weight)
+                    updown_k = module_k.calc_updown(self.in_proj_weight)
+                    updown_v = module_v.calc_updown(self.in_proj_weight)
+                    updown_qkv = torch.vstack([updown_q, updown_k, updown_v])
+                    updown_out = module_out.calc_updown(self.out_proj.weight)
+
+                self.in_proj_weight += updown_qkv
+                self.out_proj.weight += updown_out
+
+            except RuntimeError as e:
+                logging.debug(f"Network {net.name} layer {network_layer_name}: {e}")
+                extra_network_lora.errors[net.name] = extra_network_lora.errors.get(net.name, 0) + 1
+
+            continue
 
         if module is None:
             continue
 
-        print(f'failed to calculate network weights for layer {network_layer_name}')
+        logging.debug(f"Network {net.name} layer {network_layer_name}: couldn't find supported operation")
+        extra_network_lora.errors[net.name] = extra_network_lora.errors.get(net.name, 0) + 1
 
     self.network_current_names = wanted_names
 
@@ -519,6 +531,7 @@ def infotext_pasted(infotext, params):
     if added:
         params["Prompt"] += "\n" + "".join(added)
 
+extra_network_lora = None
 
 available_networks = {}
 available_network_aliases = {}
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index dc307f8c..4c6e774a 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -23,9 +23,9 @@ def unload():
 def before_ui():
     ui_extra_networks.register_page(ui_extra_networks_lora.ExtraNetworksPageLora())
 
-    extra_network = extra_networks_lora.ExtraNetworkLora()
-    extra_networks.register_extra_network(extra_network)
-    extra_networks.register_extra_network_alias(extra_network, "lyco")
+    networks.extra_network_lora = extra_networks_lora.ExtraNetworkLora()
+    extra_networks.register_extra_network(networks.extra_network_lora)
+    extra_networks.register_extra_network_alias(networks.extra_network_lora, "lyco")
 
 
 if not hasattr(torch.nn, 'Linear_forward_before_network'):
-- 
cgit v1.2.1
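The pattern introduced above is worth isolating: each per-layer failure (for example, a LoRA built for a different base model whose updown tensor doesn't fit) is caught, logged at debug level, and counted per network in an errors dict; deactivate() then surfaces a single summary in the UI instead of the whole generation failing. A self-contained sketch of that pattern, assuming nothing from the webui codebase (ErrorTally and all names here are illustrative):

    import logging

    class ErrorTally:
        # Count per-network failures, report once at the end --
        # a sketch of the errors dict plus deactivate() reporting above.
        def __init__(self):
            self.errors = {}  # network name -> number of failed layers

        def record(self, net_name, layer_name, exc):
            # Log the detail at debug level; keep only a count for the UI.
            logging.debug("Network %s layer %s: %s", net_name, layer_name, exc)
            self.errors[net_name] = self.errors.get(net_name, 0) + 1

        def summary(self):
            if not self.errors:
                return None
            return "Networks with errors: " + ", ".join(f"{k} ({v})" for k, v in self.errors.items())

    tally = ErrorTally()
    for layer in ("down_blocks_0_attn1", "mid_block_attn"):
        try:
            raise RuntimeError("size mismatch")  # stand-in for calc_updown() failing
        except RuntimeError as e:
            tally.record("some_lora", layer, e)

    print(tally.summary())  # Networks with errors: some_lora (2)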
From db40d26d086b6e980bd28ce7eb3008ec09b1f0e8 Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Sun, 13 Aug 2023 16:38:10 +0300
Subject: linter

---
 extensions-builtin/Lora/extra_networks_lora.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/extra_networks_lora.py b/extensions-builtin/Lora/extra_networks_lora.py
index 32e32cab..005ff32c 100644
--- a/extensions-builtin/Lora/extra_networks_lora.py
+++ b/extensions-builtin/Lora/extra_networks_lora.py
@@ -1,4 +1,4 @@
-from modules import extra_networks, shared, sd_hijack
+from modules import extra_networks, shared
 import networks
 
 
-- 
cgit v1.2.1
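Across the three commits, one shared ExtraNetworkLora instance ends up published on the networks module (networks.extra_network_lora), so the weight-application code can tally errors onto the same object whose deactivate() later reports them; the final linter commit just drops the sd_hijack import that had become unused in extra_networks_lora.py. A hedged sketch of that shared-instance wiring, with a SimpleNamespace standing in for the real networks module and all function bodies reduced to stubs:

    import types

    # Stub standing in for the `networks` module; only the attribute
    # used by this pattern is modeled.
    networks = types.SimpleNamespace(extra_network_lora=None)

    class ExtraNetworkLora:
        def __init__(self):
            self.errors = {}

    def before_ui():
        # Mirrors lora_script.py: publish one instance on the module so
        # other code (e.g. network_apply_weights) can reach it.
        networks.extra_network_lora = ExtraNetworkLora()

    def apply_weights():
        # Hypothetical failure site: the same instance is visible here.
        errs = networks.extra_network_lora.errors
        errs["broken_lora"] = errs.get("broken_lora", 0) + 1

    before_ui()
    apply_weights()
    print(networks.extra_network_lora.errors)  # {'broken_lora': 1}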