path: root/modules
author     AUTOMATIC1111 <16777216c@gmail.com>    2023-07-26 15:07:56 +0300
committer  AUTOMATIC1111 <16777216c@gmail.com>    2023-07-26 15:08:12 +0300
commit     055461ae41436e0fcbdf9f5c6b82c9441c6b5b5f (patch)
tree       e12c5c73edb22538efec3907edbcc986029f5b02 /modules
parent     5c8f91b22975701af22d24f947af82e7d23264d5 (diff)
repair SDXL
Diffstat (limited to 'modules')
-rw-r--r--  modules/sd_hijack_clip.py  5
1 file changed, 5 insertions(+), 0 deletions(-)
diff --git a/modules/sd_hijack_clip.py b/modules/sd_hijack_clip.py
index 990533fe..16a5500e 100644
--- a/modules/sd_hijack_clip.py
+++ b/modules/sd_hijack_clip.py
@@ -270,6 +270,8 @@ class FrozenCLIPEmbedderWithCustomWordsBase(torch.nn.Module):
         z = self.encode_with_transformers(tokens)
+        pooled = getattr(z, 'pooled', None)
+
         # restoring original mean is likely not correct, but it seems to work well to prevent artifacts that happen otherwise
         batch_multipliers = torch.asarray(batch_multipliers).to(devices.device)
         original_mean = z.mean()
@@ -277,6 +279,9 @@ class FrozenCLIPEmbedderWithCustomWordsBase(torch.nn.Module):
         new_mean = z.mean()
         z = z * (original_mean / new_mean)
+        if pooled is not None:
+            z.pooled = pooled
+
         return z
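
Why the patch is needed, in brief: on the SDXL path the encoder attaches the pooled text embedding to z as a plain Python attribute (z.pooled), and the emphasis / mean-restoration arithmetic above rebinds z to freshly created tensors that no longer carry that attribute. The sketch below is a minimal, self-contained illustration of that behaviour and of the fix; it is not webui code, and the shapes are illustrative stand-ins rather than actual model dimensions.

import torch

# Stand-in for the transformer hidden states and a pooled embedding
# (shapes are placeholders, not taken from any real model).
z = torch.randn(1, 77, 2048)
z.pooled = torch.randn(1, 1280)

# Capture the attribute before any arithmetic, as the patch does.
pooled = getattr(z, 'pooled', None)

# Emphasis scaling and mean restoration each produce a new tensor object,
# so the ad-hoc attribute is lost along the way.
original_mean = z.mean()
z = z * 1.1
z = z * (original_mean / z.mean())
assert not hasattr(z, 'pooled')

# Re-attach it so downstream conditioning code still finds it.
if pooled is not None:
    z.pooled = pooled
assert hasattr(z, 'pooled')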