From 6b9795849d497b41514aa9462690cf7c2802e4f6 Mon Sep 17 00:00:00 2001
From: hako-mikan <122196982+hako-mikan@users.noreply.github.com>
Date: Thu, 9 Nov 2023 20:23:37 +0900
Subject: Fix model switch bug

---
 extensions-builtin/Lora/networks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py
index 96f935b2..a21ea0fa 100644
--- a/extensions-builtin/Lora/networks.py
+++ b/extensions-builtin/Lora/networks.py
@@ -418,7 +418,7 @@ def network_forward(module, input, original_forward):
 def network_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
     self.network_current_names = ()
     self.network_weights_backup = None
-
+    self.network_bias_backup = None
 
 def network_Linear_forward(self, input):
     if shared.opts.lora_functional:
--
cgit v1.2.1


From 6b3f7039b6b71132349d294e884be82ca7c88d87 Mon Sep 17 00:00:00 2001
From: hako-mikan <122196982+hako-mikan@users.noreply.github.com>
Date: Fri, 9 Feb 2024 23:57:46 +0900
Subject: add option

---
 modules/sd_hijack_clip.py | 4 +++-
 modules/shared_options.py | 1 +
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/modules/sd_hijack_clip.py b/modules/sd_hijack_clip.py
index 8f29057a..673b29ea 100644
--- a/modules/sd_hijack_clip.py
+++ b/modules/sd_hijack_clip.py
@@ -279,7 +279,9 @@ class FrozenCLIPEmbedderWithCustomWordsBase(torch.nn.Module):
         original_mean = z.mean()
         z = z * batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
         new_mean = z.mean()
-        z = z * (original_mean / new_mean)
+
+        if not getattr(opts, "disable_normalize_embeddings", False):
+            z = z * (original_mean / new_mean)
 
         if pooled is not None:
             z.pooled = pooled
diff --git a/modules/shared_options.py b/modules/shared_options.py
index d2e86ff1..0b2d7ea3 100644
--- a/modules/shared_options.py
+++ b/modules/shared_options.py
@@ -150,6 +150,7 @@ options_templates.update(options_section(('sd', "Stable Diffusion", "sd"), {
     "sd_unet": OptionInfo("Automatic", "SD Unet", gr.Dropdown, lambda: {"choices": shared_items.sd_unet_items()}, refresh=shared_items.refresh_unet_list).info("choose Unet model: Automatic = use one with same filename as checkpoint; None = use Unet from checkpoint"),
     "enable_quantization": OptionInfo(False, "Enable quantization in K samplers for sharper and cleaner results. This may change existing seeds").needs_reload_ui(),
     "enable_emphasis": OptionInfo(True, "Enable emphasis").info("use (text) to make model pay more attention to text and [text] to make it pay less attention"),
+    "disable_normalize_embeddings": OptionInfo(False, "Disable normalize embeddings").info("Do not normalize embeddings after calculating emphasis. It can be expected to be effective in preventing artifacts in SDXL."),
     "enable_batch_seeds": OptionInfo(True, "Make K-diffusion samplers produce same images in a batch as when making a single image"),
     "comma_padding_backtrack": OptionInfo(20, "Prompt word wrap length limit", gr.Slider, {"minimum": 0, "maximum": 74, "step": 1}).info("in tokens - for texts shorter than specified, if they don't fit into 75 token limit, move them to the next 75 token chunk"),
     "CLIP_stop_at_last_layers": OptionInfo(1, "Clip skip", gr.Slider, {"minimum": 1, "maximum": 12, "step": 1}, infotext="Clip skip").link("wiki", "https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Features#clip-skip").info("ignore last layers of CLIP network; 1 ignores none, 2 ignores one layer"),
--
cgit v1.2.1


From c3c88ca8b46a19f48104d0421e14be28853b2a92 Mon Sep 17 00:00:00 2001
From: hako-mikan <122196982+hako-mikan@users.noreply.github.com>
Date: Sat, 10 Feb 2024 00:18:08 +0900
Subject: Update sd_hijack_clip.py

---
 modules/sd_hijack_clip.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/sd_hijack_clip.py b/modules/sd_hijack_clip.py
index 673b29ea..89634fbf 100644
--- a/modules/sd_hijack_clip.py
+++ b/modules/sd_hijack_clip.py
@@ -279,7 +279,7 @@ class FrozenCLIPEmbedderWithCustomWordsBase(torch.nn.Module):
         original_mean = z.mean()
         z = z * batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
         new_mean = z.mean()
-
+
         if not getattr(opts, "disable_normalize_embeddings", False):
             z = z * (original_mean / new_mean)
 
--
cgit v1.2.1
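For context, the second and third patches gate the emphasis mean-renormalization in FrozenCLIPEmbedderWithCustomWordsBase behind the new "disable_normalize_embeddings" option. The following is a minimal standalone sketch of that logic, not webui code: apply_emphasis and its explicit disable_normalize_embeddings parameter are illustrative stand-ins (the real hunk reads the option via getattr(opts, ...) inside the class method), while the tensor arithmetic mirrors the diff above.

# Minimal standalone sketch (assumed names, not webui code): apply_emphasis and
# the explicit disable_normalize_embeddings parameter are for illustration only;
# the tensor math follows the sd_hijack_clip.py hunk in the patches above.
import torch


def apply_emphasis(z: torch.Tensor, batch_multipliers: torch.Tensor,
                   disable_normalize_embeddings: bool = False) -> torch.Tensor:
    original_mean = z.mean()

    # scale each token embedding by its emphasis multiplier
    z = z * batch_multipliers.reshape(batch_multipliers.shape + (1,)).expand(z.shape)
    new_mean = z.mean()

    if not disable_normalize_embeddings:
        # restore the original mean so emphasis only shifts relative token weighting
        z = z * (original_mean / new_mean)

    return z


# usage: one prompt of 77 tokens with 768-dim embeddings, last token emphasized
z = torch.randn(1, 77, 768)
multipliers = torch.ones(1, 77)
multipliers[:, -1] = 1.3
out = apply_emphasis(z, multipliers, disable_normalize_embeddings=True)
print(out.shape)  # torch.Size([1, 77, 768])

Skipping the final rescale leaves the embedding mean changed by the multipliers instead of restoring original_mean; that is the behavioral difference the option toggles.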