author      C43H66N12O12S2 <36072735+C43H66N12O12S2@users.noreply.github.com>  2022-10-07 05:22:28 +0300
committer   GitHub <noreply@github.com>  2022-10-07 05:22:28 +0300
commit      2eb911b056ce6ff4434f673366782ed34f2b2f12 (patch)
tree        97f0c341a39415dcbf167618194214d80200ebbd /modules
parent      f174fb29228a04955fb951b32b0bab79e33ec2b8 (diff)
Update sd_hijack.py
Diffstat (limited to 'modules')
-rw-r--r--  modules/sd_hijack.py | 13 +++++++++----
1 file changed, 9 insertions(+), 4 deletions(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index a6fa890c..6221ed5a 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -20,12 +20,17 @@ diffusionmodules_model_AttnBlock_forward = ldm.modules.diffusionmodules.model.AttnBlock.forward
 def apply_optimizations():
-    ldm.modules.diffusionmodules.model.nonlinearity = silu
-
     if cmd_opts.opt_split_attention_v1:
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.split_cross_attention_forward_v1
-    elif not cmd_opts.disable_opt_split_attention and (cmd_opts.opt_split_attention or torch.cuda.is_available()):
-        ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.split_cross_attention_forward
+    if cmd_opts.opt_split_attention:
+        ldm.modules.attention_CrossAttention_forward = sd_hijack_optimizations.split_cross_attention_forward
+        ldm.modules.diffusionmodules.model.nonlinearity = sd_hijack_optimizations.nonlinearity_hijack
+        ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.cross_attention_attnblock_forward
+    elif not cmd_opts.disable_opt_xformers_attention:
+        ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.xformers_attention_forward
+        ldm.modules.attention.CrossAttention._maybe_init = sd_hijack_optimizations._maybe_init
+        ldm.modules.attention.CrossAttention.attention_op = None
+        ldm.modules.diffusionmodules.model.nonlinearity = sd_hijack_optimizations.nonlinearity_hijack
         ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.cross_attention_attnblock_forward
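
The hunk above swaps optimized implementations into the ldm package at runtime by reassigning attributes (monkey-patching). Below is a minimal self-contained sketch of that pattern; DummyCrossAttention and optimized_forward are hypothetical stand-ins for ldm.modules.attention.CrossAttention and sd_hijack_optimizations.xformers_attention_forward, not code from this repository.

class DummyCrossAttention:
    """Hypothetical stand-in for ldm.modules.attention.CrossAttention."""

    def forward(self, x):
        # Baseline implementation to be replaced.
        return x * 2

def optimized_forward(self, x):
    # Hypothetical faster replacement; it must keep the same signature
    # and return the same result as the method it overrides.
    return x + x

# Reassigning the class attribute, as apply_optimizations() does,
# changes the behaviour of every existing and future instance.
DummyCrossAttention.forward = optimized_forward

# Class-level state can be attached the same way, mirroring
# CrossAttention.attention_op = None in the hunk above.
DummyCrossAttention.attention_op = None

attn = DummyCrossAttention()
assert attn.forward(3) == 6

Note that the patch only takes effect through the attribute path callers actually use: the added line assigning ldm.modules.attention_CrossAttention_forward (underscores) binds a new name on the ldm.modules module, rather than replacing ldm.modules.attention.CrossAttention.forward (dots) as the elif branch does.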