aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorFampai <unknown>2022-10-09 04:32:40 -0400
committerAUTOMATIC1111 <16777216c@gmail.com>2022-10-09 22:31:23 +0300
commitad3ae441081155dcd4fde805279e5082ca264695 (patch)
tree018cabe6cab1c4fce8b80ffa38e84f347165a817
parentec2bd9be75865c9f3a8c898163ab381688c03b6e (diff)
Updated code for legibility
-rw-r--r--modules/sd_hijack.py7
1 file changed, 5 insertions, 2 deletions
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 4a2d2153..7793d25b 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -284,8 +284,11 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module):
tmp = -opts.CLIP_stop_at_last_layers
outputs = self.wrapped.transformer(input_ids=tokens, position_ids=position_ids, output_hidden_states=tmp)
- z = outputs.hidden_states[tmp]
- z = self.wrapped.transformer.text_model.final_layer_norm(z)
+ if tmp < -1:
+ z = outputs.hidden_states[tmp]
+ z = self.wrapped.transformer.text_model.final_layer_norm(z)
+ else:
+ z = outputs.last_hidden_state
# restoring original mean is likely not correct, but it seems to work well to prevent artifacts that happen otherwise
batch_multipliers_of_same_length = [x + [1.0] * (target_token_count - len(x)) for x in batch_multipliers]