From 2016733814433ca2b69d10764bfa0ab4c7088782 Mon Sep 17 00:00:00 2001
From: brkirch
Date: Tue, 7 Feb 2023 00:05:54 -0500
Subject: Apply hijacks in ddpm_edit for upcast sampling

To avoid import errors, ddpm_edit hijacks are done after an instruct
pix2pix model is loaded.
---
 modules/sd_hijack.py | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'modules/sd_hijack.py')

diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 8fdc5990..fca418cd 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -104,6 +104,9 @@ class StableDiffusionModelHijack:
             m.cond_stage_model.model.token_embedding = EmbeddingsWithFixes(m.cond_stage_model.model.token_embedding, self)
             m.cond_stage_model = sd_hijack_open_clip.FrozenOpenCLIPEmbedderWithCustomWords(m.cond_stage_model, self)
 
+        if m.cond_stage_key == "edit":
+            sd_hijack_unet.hijack_ddpm_edit()
+
         self.optimization_method = apply_optimizations()
 
         self.clip = m.cond_stage_model
--
cgit v1.2.1
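The commit above defers the ddpm_edit patch until the model type is known: importing ddpm_edit at startup would raise ImportError, because the instruct-pix2pix code only becomes importable after such a model (detected via cond_stage_key == "edit") has been loaded. Below is a minimal sketch of that deferred-patch pattern; the module path and guard variable are illustrative assumptions, not the repository's exact implementation.

    import importlib

    _ddpm_edit_hijacked = False


    def hijack_ddpm_edit():
        """Patch ddpm_edit lazily; repeated calls are no-ops."""
        global _ddpm_edit_hijacked
        if _ddpm_edit_hijacked:
            return
        # Importing here, not at module load, avoids an ImportError when no
        # instruct-pix2pix model has been loaded yet. (Module path assumed.)
        ddpm_edit = importlib.import_module("modules.models.diffusion.ddpm_edit")
        # ... wrap the relevant ddpm_edit methods with upcast-aware versions ...
        _ddpm_edit_hijacked = True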
From c4bfd20f317243d7ceac6e2fbf30b18bbebd3e6d Mon Sep 17 00:00:00 2001
From: Shondoit
Date: Thu, 12 Jan 2023 15:03:46 +0100
Subject: Hijack to add weighted_forward to model: return loss * weight map

---
 modules/sd_hijack.py | 52 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 52 insertions(+)

(limited to 'modules/sd_hijack.py')

diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 8fdc5990..57ed5635 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -1,5 +1,6 @@
 import torch
 from torch.nn.functional import silu
+from types import MethodType
 
 import modules.textual_inversion.textual_inversion
 from modules import devices, sd_hijack_optimizations, shared, sd_hijack_checkpoint
@@ -76,6 +77,54 @@ def fix_checkpoint():
     pass
 
 
+def weighted_loss(sd_model, pred, target, mean=True):
+    #Calculate the weight normally, but ignore the mean
+    loss = sd_model._old_get_loss(pred, target, mean=False)
+
+    #Check if we have weights available
+    weight = getattr(sd_model, '_custom_loss_weight', None)
+    if weight is not None:
+        loss *= weight
+
+    #Return the loss, as mean if specified
+    return loss.mean() if mean else loss
+
+def weighted_forward(sd_model, x, c, w, *args, **kwargs):
+    try:
+        #Temporarily append weights to a place accessible during loss calc
+        sd_model._custom_loss_weight = w
+
+        #Replace 'get_loss' with a weight-aware one. Otherwise we need to reimplement 'forward' completely
+        #Keep 'get_loss', but don't overwrite the previous old_get_loss if it's already set
+        if not hasattr(sd_model, '_old_get_loss'):
+            sd_model._old_get_loss = sd_model.get_loss
+        sd_model.get_loss = MethodType(weighted_loss, sd_model)
+
+        #Run the standard forward function, but with the patched 'get_loss'
+        return sd_model.forward(x, c, *args, **kwargs)
+    finally:
+        try:
+            #Delete temporary weights if appended
+            del sd_model._custom_loss_weight
+        except AttributeError as e:
+            pass
+
+        #If we have an old loss function, reset the loss function to the original one
+        if hasattr(sd_model, '_old_get_loss'):
+            sd_model.get_loss = sd_model._old_get_loss
+            del sd_model._old_get_loss
+
+def apply_weighted_forward(sd_model):
+    #Add new function 'weighted_forward' that can be called to calc weighted loss
+    sd_model.weighted_forward = MethodType(weighted_forward, sd_model)
+
+def undo_weighted_forward(sd_model):
+    try:
+        del sd_model.weighted_forward
+    except AttributeError as e:
+        pass
+
+
 class StableDiffusionModelHijack:
     fixes = None
     comments = []
@@ -104,6 +153,8 @@ class StableDiffusionModelHijack:
             m.cond_stage_model.model.token_embedding = EmbeddingsWithFixes(m.cond_stage_model.model.token_embedding, self)
             m.cond_stage_model = sd_hijack_open_clip.FrozenOpenCLIPEmbedderWithCustomWords(m.cond_stage_model, self)
 
+        apply_weighted_forward(m)
+
         self.optimization_method = apply_optimizations()
 
         self.clip = m.cond_stage_model
@@ -132,6 +183,7 @@ class StableDiffusionModelHijack:
             m.cond_stage_model = m.cond_stage_model.wrapped
 
         undo_optimizations()
+        undo_weighted_forward(m)
 
         self.apply_circular(False)
         self.layers = None
--
cgit v1.2.1
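The hijack above works by temporarily rebinding the model's get_loss at the instance level with types.MethodType, running the normal forward pass (which calls get_loss internally), and restoring the original method in a finally block so the model is left clean even if forward raises. The following self-contained sketch demonstrates the same bind-and-restore pattern; ToyModel and its squared-error loss are made-up stand-ins for LatentDiffusion, not code from the webui.

    from types import MethodType

    import torch


    class ToyModel:
        """Made-up stand-in: forward() calls get_loss() internally."""

        def get_loss(self, pred, target, mean=True):
            loss = (pred - target) ** 2  # stand-in for the real per-element loss
            return loss.mean() if mean else loss

        def forward(self, pred, target):
            return self.get_loss(pred, target)


    def weighted_loss(model, pred, target, mean=True):
        # Ask the saved original for the unreduced loss, scale each element
        # by the weight map, and only then reduce to a mean if requested.
        loss = model._old_get_loss(pred, target, mean=False)
        weight = getattr(model, '_custom_loss_weight', None)
        if weight is not None:
            loss = loss * weight
        return loss.mean() if mean else loss


    def weighted_forward(model, pred, target, weight):
        try:
            # Stash the weight map where weighted_loss can find it.
            model._custom_loss_weight = weight
            # Bind the replacement on the instance; the class is untouched.
            if not hasattr(model, '_old_get_loss'):
                model._old_get_loss = model.get_loss
            model.get_loss = MethodType(weighted_loss, model)
            return model.forward(pred, target)
        finally:
            # Undo the hijack even if forward() raised.
            del model._custom_loss_weight
            if hasattr(model, '_old_get_loss'):
                model.get_loss = model._old_get_loss
                del model._old_get_loss


    model = ToyModel()
    pred, target = torch.ones(4), torch.zeros(4)
    weight = torch.tensor([1.0, 1.0, 2.0, 2.0])  # per-element weight map
    print(weighted_forward(model, pred, target, weight))  # tensor(1.5000)
    print(model.get_loss(pred, target))  # tensor(1.) -- original behavior restored

Binding the replacement on the instance rather than the class keeps the patch local to the one loaded model, and doing the restore in finally guarantees the hijack never outlives a single weighted_forward call.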