aboutsummaryrefslogtreecommitdiff
path: root/modules/sd_models_xl.py
diff options
context:
space:
mode:
authorAUTOMATIC1111 <16777216c@gmail.com>2023-07-13 11:35:52 +0300
committerAUTOMATIC1111 <16777216c@gmail.com>2023-07-13 11:35:52 +0300
commit594c8e7b263d9b37f4b18b56b159aeb6d1bba1b4 (patch)
tree274143ec746dcc454c3b0b5b094abf688d2da676 /modules/sd_models_xl.py
parent21aec6f567f52271efbbe33a2ab6561f9a47b787 (diff)
fix CLIP doing the unneeded normalization
revert SD2.1 back to use the original repo; add SDXL's force_zero_embeddings to negative prompt
Diffstat (limited to 'modules/sd_models_xl.py')
-rw-r--r--modules/sd_models_xl.py3
1 file changed, 2 insertions, 1 deletion
diff --git a/modules/sd_models_xl.py b/modules/sd_models_xl.py
index 1dd4459f..b799ff46 100644
--- a/modules/sd_models_xl.py
+++ b/modules/sd_models_xl.py
@@ -22,7 +22,8 @@ def get_learned_conditioning(self: sgm.models.diffusion.DiffusionEngine, batch:
"target_size_as_tuple": torch.tensor([height, width]).repeat(len(batch), 1).to(devices.device, devices.dtype),
}
- c = self.conditioner(sdxl_conds)
+ force_zero_negative_prompt = getattr(batch, 'is_negative_prompt', False) and all(x == '' for x in batch)
+ c = self.conditioner(sdxl_conds, force_zero_embeddings=['txt'] if force_zero_negative_prompt else [])
return c