From eed963e97261ee03bffe59e3d343dcf53d82dbfd Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Wed, 9 Aug 2023 16:54:49 +0300
Subject: Lora cache in memory

---
 extensions-builtin/Lora/scripts/lora_script.py | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'extensions-builtin/Lora/scripts')

diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index cd28afc9..6ab8b6e7 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -65,6 +65,7 @@ shared.options_templates.update(shared.options_section(('extra_networks', "Extra
     "lora_add_hashes_to_infotext": shared.OptionInfo(True, "Add Lora hashes to infotext"),
     "lora_show_all": shared.OptionInfo(False, "Always show all networks on the Lora page").info("otherwise, those detected as for incompatible version of Stable Diffusion will be hidden"),
     "lora_hide_unknown_for_versions": shared.OptionInfo([], "Hide networks of unknown versions for model versions", gr.CheckboxGroup, {"choices": ["SD1", "SD2", "SDXL"]}),
+    "lora_in_memory_limit": shared.OptionInfo(0, "Number of Lora networks to keep cached in memory", gr.Number, {"precision": 0}),
 }))
 
 
@@ -121,3 +122,5 @@ def infotext_pasted(infotext, d):
 
 
 script_callbacks.on_infotext_pasted(infotext_pasted)
+
+shared.opts.onchange("lora_in_memory_limit", networks.purge_networks_from_memory)
-- cgit v1.2.1

From bd4da4474bef5c9c1f690c62b971704ee73d2860 Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Sun, 13 Aug 2023 02:27:39 +0800
Subject: Add extra norm module into built-in lora ext

Refers to LyCORIS 1.9.0.dev6; adds a new option and module for training norm
layers (which are reported to be good for style).

---
 extensions-builtin/Lora/scripts/lora_script.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

(limited to 'extensions-builtin/Lora/scripts')

diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 6ab8b6e7..dc307f8c 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -40,6 +40,18 @@ if not hasattr(torch.nn, 'Conv2d_forward_before_network'):
 if not hasattr(torch.nn, 'Conv2d_load_state_dict_before_network'):
     torch.nn.Conv2d_load_state_dict_before_network = torch.nn.Conv2d._load_from_state_dict
 
+if not hasattr(torch.nn, 'GroupNorm_forward_before_network'):
+    torch.nn.GroupNorm_forward_before_network = torch.nn.GroupNorm.forward
+
+if not hasattr(torch.nn, 'GroupNorm_load_state_dict_before_network'):
+    torch.nn.GroupNorm_load_state_dict_before_network = torch.nn.GroupNorm._load_from_state_dict
+
+if not hasattr(torch.nn, 'LayerNorm_forward_before_network'):
+    torch.nn.LayerNorm_forward_before_network = torch.nn.LayerNorm.forward
+
+if not hasattr(torch.nn, 'LayerNorm_load_state_dict_before_network'):
+    torch.nn.LayerNorm_load_state_dict_before_network = torch.nn.LayerNorm._load_from_state_dict
+
 if not hasattr(torch.nn, 'MultiheadAttention_forward_before_network'):
     torch.nn.MultiheadAttention_forward_before_network = torch.nn.MultiheadAttention.forward
 
@@ -50,6 +62,10 @@ torch.nn.Linear.forward = networks.network_Linear_forward
 torch.nn.Linear._load_from_state_dict = networks.network_Linear_load_state_dict
 torch.nn.Conv2d.forward = networks.network_Conv2d_forward
 torch.nn.Conv2d._load_from_state_dict = networks.network_Conv2d_load_state_dict
+torch.nn.GroupNorm.forward = networks.network_GroupNorm_forward
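The two hunks above follow the extension's established monkey-patching discipline: first stash the original torch.nn method under a `*_before_network` attribute, guarded by `hasattr` so that a script reload does not overwrite the backup with an already-patched method, then point the class method at the network-aware replacement. A condensed sketch of the pattern (the `store_original_once` helper is illustrative, not part of the extension):

```python
import torch


def store_original_once(cls, attr, backup_name):
    # Save the unpatched method on the torch.nn namespace exactly once.
    # Without the guard, re-running this script would back up the already
    # patched method, making the true original unrecoverable.
    if not hasattr(torch.nn, backup_name):
        setattr(torch.nn, backup_name, getattr(cls, attr))


store_original_once(torch.nn.GroupNorm, 'forward', 'GroupNorm_forward_before_network')
store_original_once(torch.nn.LayerNorm, 'forward', 'LayerNorm_forward_before_network')

# Once the backups exist, the live methods can safely be swapped for the
# network-aware wrappers (networks.network_GroupNorm_forward and friends).
```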
From d8419762c1454ba51baa710d9ce8e762efc056ef Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Sun, 13 Aug 2023 15:07:37 +0300
Subject: Lora: output warnings in UI rather than fail for unfitting loras;
 switch to logging for error output in console

---
 extensions-builtin/Lora/scripts/lora_script.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

(limited to 'extensions-builtin/Lora/scripts')

diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index dc307f8c..4c6e774a 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -23,9 +23,9 @@ def unload():
 def before_ui():
     ui_extra_networks.register_page(ui_extra_networks_lora.ExtraNetworksPageLora())
 
-    extra_network = extra_networks_lora.ExtraNetworkLora()
-    extra_networks.register_extra_network(extra_network)
-    extra_networks.register_extra_network_alias(extra_network, "lyco")
+    networks.extra_network_lora = extra_networks_lora.ExtraNetworkLora()
+    extra_networks.register_extra_network(networks.extra_network_lora)
+    extra_networks.register_extra_network_alias(networks.extra_network_lora, "lyco")
 
 
 if not hasattr(torch.nn, 'Linear_forward_before_network'):
-- cgit v1.2.1

From f01682ee01e81e8ef84fd6fffe8f7aa17233285d Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Tue, 15 Aug 2023 19:23:27 +0300
Subject: store patches for Lora in a specialized module

---
 extensions-builtin/Lora/scripts/lora_script.py | 52 +++----------------------
 1 file changed, 5 insertions(+), 47 deletions(-)

(limited to 'extensions-builtin/Lora/scripts')

diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 4c6e774a..546fb55e 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -7,17 +7,14 @@ from fastapi import FastAPI
 import network
 import networks
 import lora  # noqa:F401
+import lora_patches
 import extra_networks_lora
 import ui_extra_networks_lora
-from modules import script_callbacks, ui_extra_networks, extra_networks, shared
+from modules import script_callbacks, ui_extra_networks, extra_networks, shared, patches
+
 
 def unload():
-    torch.nn.Linear.forward = torch.nn.Linear_forward_before_network
-    torch.nn.Linear._load_from_state_dict = torch.nn.Linear_load_state_dict_before_network
-    torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_network
-    torch.nn.Conv2d._load_from_state_dict = torch.nn.Conv2d_load_state_dict_before_network
-    torch.nn.MultiheadAttention.forward = torch.nn.MultiheadAttention_forward_before_network
-    torch.nn.MultiheadAttention._load_from_state_dict = torch.nn.MultiheadAttention_load_state_dict_before_network
+    networks.originals.undo()
 
 
 def before_ui():
@@ -28,46 +25,7 @@ def before_ui():
     extra_networks.register_extra_network_alias(networks.extra_network_lora, "lyco")
 
 
-if not hasattr(torch.nn, 'Linear_forward_before_network'):
-    torch.nn.Linear_forward_before_network = torch.nn.Linear.forward
-
-if not hasattr(torch.nn, 'Linear_load_state_dict_before_network'):
-    torch.nn.Linear_load_state_dict_before_network = torch.nn.Linear._load_from_state_dict
-
-if not hasattr(torch.nn, 'Conv2d_forward_before_network'):
-    torch.nn.Conv2d_forward_before_network = torch.nn.Conv2d.forward
-
-if not hasattr(torch.nn, 'Conv2d_load_state_dict_before_network'):
-    torch.nn.Conv2d_load_state_dict_before_network = torch.nn.Conv2d._load_from_state_dict
-
-if not hasattr(torch.nn, 'GroupNorm_forward_before_network'):
-    torch.nn.GroupNorm_forward_before_network = torch.nn.GroupNorm.forward
-
-if not hasattr(torch.nn, 'GroupNorm_load_state_dict_before_network'):
-    torch.nn.GroupNorm_load_state_dict_before_network = torch.nn.GroupNorm._load_from_state_dict
-
-if not hasattr(torch.nn, 'LayerNorm_forward_before_network'):
-    torch.nn.LayerNorm_forward_before_network = torch.nn.LayerNorm.forward
-
-if not hasattr(torch.nn, 'LayerNorm_load_state_dict_before_network'):
-    torch.nn.LayerNorm_load_state_dict_before_network = torch.nn.LayerNorm._load_from_state_dict
-
-if not hasattr(torch.nn, 'MultiheadAttention_forward_before_network'):
-    torch.nn.MultiheadAttention_forward_before_network = torch.nn.MultiheadAttention.forward
-
-if not hasattr(torch.nn, 'MultiheadAttention_load_state_dict_before_network'):
-    torch.nn.MultiheadAttention_load_state_dict_before_network = torch.nn.MultiheadAttention._load_from_state_dict
-
-torch.nn.Linear.forward = networks.network_Linear_forward
-torch.nn.Linear._load_from_state_dict = networks.network_Linear_load_state_dict
-torch.nn.Conv2d.forward = networks.network_Conv2d_forward
-torch.nn.Conv2d._load_from_state_dict = networks.network_Conv2d_load_state_dict
-torch.nn.GroupNorm.forward = networks.network_GroupNorm_forward
-torch.nn.GroupNorm._load_from_state_dict = networks.network_GroupNorm_load_state_dict
-torch.nn.LayerNorm.forward = networks.network_LayerNorm_forward
-torch.nn.LayerNorm._load_from_state_dict = networks.network_LayerNorm_load_state_dict
-torch.nn.MultiheadAttention.forward = networks.network_MultiheadAttention_forward
-torch.nn.MultiheadAttention._load_from_state_dict = networks.network_MultiheadAttention_load_state_dict
+networks.originals = lora_patches.LoraPatches()
 
 script_callbacks.on_model_loaded(networks.assign_network_names_to_compvis_modules)
 script_callbacks.on_script_unloaded(unload)
-- cgit v1.2.1
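The `lora_patches` module referenced here centralizes what the ad-hoc `hasattr` bookkeeping used to do. A sketch of the shape such a module can take, assuming the `modules.patches` helper introduced alongside this change exposes `patch(key, obj, field, replacement)` (returning the original) and `undo(key, obj, field)`:

```python
import torch

import networks
from modules import patches


class LoraPatches:
    def __init__(self):
        # Each patch() call records the original under a key, so the same
        # attribute cannot be double-patched by this module and can be
        # restored later without any extra bookkeeping on torch.nn.
        self.Linear_forward = patches.patch(__name__, torch.nn.Linear, 'forward', networks.network_Linear_forward)
        self.Linear_load_state_dict = patches.patch(__name__, torch.nn.Linear, '_load_from_state_dict', networks.network_Linear_load_state_dict)
        self.GroupNorm_forward = patches.patch(__name__, torch.nn.GroupNorm, 'forward', networks.network_GroupNorm_forward)
        # ... Conv2d, LayerNorm and MultiheadAttention are handled the same way ...

    def undo(self):
        # Restore the saved originals; called from the script's unload() hook.
        self.Linear_forward = patches.undo(__name__, torch.nn.Linear, 'forward')
        self.Linear_load_state_dict = patches.undo(__name__, torch.nn.Linear, '_load_from_state_dict')
        self.GroupNorm_forward = patches.undo(__name__, torch.nn.GroupNorm, 'forward')
        # ... and so on for the remaining layer types ...
```

This replaces ten `hasattr` guards and the hand-written restore lines in `unload()` with a single object that owns both directions of the patch.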
From 85fcb7b8dfe7b3dd06931943f095c77f1043dc25 Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Tue, 15 Aug 2023 19:24:55 +0300
Subject: lint

---
 extensions-builtin/Lora/scripts/lora_script.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

(limited to 'extensions-builtin/Lora/scripts')

diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 546fb55e..ef23968c 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -1,6 +1,5 @@
 import re
 
-import torch
 import gradio as gr
 from fastapi import FastAPI
 
@@ -10,7 +9,7 @@ import lora  # noqa:F401
 import lora_patches
 import extra_networks_lora
 import ui_extra_networks_lora
-from modules import script_callbacks, ui_extra_networks, extra_networks, shared, patches
+from modules import script_callbacks, ui_extra_networks, extra_networks, shared
 
 
 def unload():
-- cgit v1.2.1
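Tying back to the first patch in this series: `shared.opts.onchange("lora_in_memory_limit", networks.purge_networks_from_memory)` re-runs the purge whenever the user changes the limit. A minimal sketch of what such a purge can look like, assuming an ordered name-to-network cache in `networks` (the cache variable and the `limit` parameter are illustrative; the real function would read the limit from `shared.opts`):

```python
from collections import OrderedDict

# Hypothetical module-level cache of loaded Lora networks, oldest first.
networks_in_memory = OrderedDict()


def purge_networks_from_memory(limit=0):
    # Evict the oldest cached networks until at most `limit` remain.
    # With the option's default of 0 the cache is emptied entirely,
    # i.e. in-memory caching stays disabled until the user raises the limit.
    while len(networks_in_memory) > limit:
        networks_in_memory.popitem(last=False)
```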