author    Greendayle <81877451+Greendayle@users.noreply.github.com>  2022-10-08 18:28:22 +0200
committer GitHub <noreply@github.com>                                2022-10-08 18:28:22 +0200
commit    0ec80f0125a14c03ac860279f40c0c062dbde0cf (patch)
tree      f51f04feecca9006a6a40e622bae3d6ae997e5eb /modules
parent    01f8cb44474e454903c11718e6a4f33dbde34bb8 (diff)
parent    3061cdb7b610d4ba7f1ea695d9d6364b591e5bc7 (diff)
Merge branch 'master' into dev/deepdanbooru
Diffstat (limited to 'modules')
 modules/prompt_parser.py           | 9 +++++++--
 modules/sd_hijack.py               | 6 +++++-
 modules/sd_hijack_optimizations.py | 5 ++++-
 modules/shared.py                  | 1 +
 4 files changed, 17 insertions(+), 4 deletions(-)
diff --git a/modules/prompt_parser.py b/modules/prompt_parser.py
index 15666073..919d5d31 100644
--- a/modules/prompt_parser.py
+++ b/modules/prompt_parser.py
@@ -13,13 +13,14 @@ import lark
 
 schedule_parser = lark.Lark(r"""
 !start: (prompt | /[][():]/+)*
-prompt: (emphasized | scheduled | plain | WHITESPACE)*
+prompt: (emphasized | scheduled | alternate | plain | WHITESPACE)*
 !emphasized: "(" prompt ")"
         | "(" prompt ":" prompt ")"
         | "[" prompt "]"
 scheduled: "[" [prompt ":"] prompt ":" [WHITESPACE] NUMBER "]"
+alternate: "[" prompt ("|" prompt)+ "]"
 WHITESPACE: /\s+/
-plain: /([^\\\[\]():]|\\.)+/
+plain: /([^\\\[\]():|]|\\.)+/
 %import common.SIGNED_NUMBER -> NUMBER
 """)
 
@@ -59,6 +60,8 @@ def get_learned_conditioning_prompt_schedules(prompts, steps):
                     tree.children[-1] *= steps
                 tree.children[-1] = min(steps, int(tree.children[-1]))
                 l.append(tree.children[-1])
+            def alternate(self, tree):
+                l.extend(range(1, steps+1))
         CollectSteps().visit(tree)
         return sorted(set(l))
 
@@ -67,6 +70,8 @@ def get_learned_conditioning_prompt_schedules(prompts, steps):
             def scheduled(self, args):
                 before, after, _, when = args
                 yield before or () if step <= when else after
+            def alternate(self, args):
+                yield next(args[(step - 1)%len(args)])
             def start(self, args):
                 def flatten(x):
                     if type(x) == str:
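
The new alternate visitor marks every step as a schedule boundary, and the transformer picks branch (step - 1) % len(args) on each step, so "[a|b]" swaps between its branches every sampling step. A minimal usage sketch, assuming modules.prompt_parser is importable and that schedules come back as [end_step, prompt] pairs (the prompt text and step count here are illustrative):

from modules.prompt_parser import get_learned_conditioning_prompt_schedules

schedules = get_learned_conditioning_prompt_schedules(["[cow|horse] in a field"], 4)
print(schedules[0])
# Expected (assumed) output, alternating branch by step:
# [[1, 'cow in a field'], [2, 'horse in a field'],
#  [3, 'cow in a field'], [4, 'horse in a field']]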
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index ed271976..307cc67d 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -22,12 +22,16 @@ def apply_optimizations():
     undo_optimizations()
 
     ldm.modules.diffusionmodules.model.nonlinearity = silu
 
-    if cmd_opts.xformers and shared.xformers_available and not torch.version.hip:
+
+    if cmd_opts.force_enable_xformers or (cmd_opts.xformers and shared.xformers_available and torch.version.cuda and torch.cuda.get_device_capability(shared.device) == (8, 6)):
+        print("Applying xformers cross attention optimization.")
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.xformers_attention_forward
         ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.xformers_attnblock_forward
     elif cmd_opts.opt_split_attention_v1:
+        print("Applying v1 cross attention optimization.")
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.split_cross_attention_forward_v1
     elif not cmd_opts.disable_opt_split_attention and (cmd_opts.opt_split_attention or torch.cuda.is_available()):
+        print("Applying cross attention optimization.")
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.split_cross_attention_forward
         ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.cross_attention_attnblock_forward
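
The old condition only excluded ROCm builds; the new one auto-enables xformers only on a CUDA build of torch running on a compute-capability 8.6 (Ampere consumer) GPU, while --force-enable-xformers bypasses the check entirely. A standalone sketch of that gating logic (the function and parameter names are stand-ins, not the webui's own):

import torch

def should_apply_xformers(force_enable, want_xformers, xformers_available, device):
    # --force-enable-xformers skips every check below.
    if force_enable:
        return True
    # Otherwise require the --xformers flag, an importable xformers package,
    # a CUDA build of torch, and an SM 8.6 (Ampere) GPU.
    return (want_xformers
            and xformers_available
            and torch.version.cuda is not None
            and torch.cuda.get_device_capability(device) == (8, 6))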
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index d23d733b..dba21192 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -211,6 +211,7 @@ def cross_attention_attnblock_forward(self, x):
     return h3
 
 def xformers_attnblock_forward(self, x):
+    try:
         h_ = x
         h_ = self.norm(h_)
         q1 = self.q(h_).contiguous()
@@ -218,4 +219,6 @@ def xformers_attnblock_forward(self, x):
         v = self.v(h_).contiguous()
         out = xformers.ops.memory_efficient_attention(q1, k1, v)
         out = self.proj_out(out)
-        return x+out
+        return x + out
+    except NotImplementedError:
+        return cross_attention_attnblock_forward(self, x)
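
xformers dispatches memory_efficient_attention to whichever kernel supports the inputs and raises NotImplementedError when none does, so the whole forward pass is wrapped in try/except and falls back to the split-attention implementation. The same pattern in isolation (the function names here are stand-ins for the two attention implementations):

def attention_with_fallback(q, k, v, fast_attention, slow_attention):
    # Try the memory-efficient kernel first; fall back transparently
    # when xformers cannot handle the given shapes/dtypes.
    try:
        return fast_attention(q, k, v)
    except NotImplementedError:
        return slow_attention(q, k, v)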
diff --git a/modules/shared.py b/modules/shared.py
index c87b726e..026dce47 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -44,6 +44,7 @@ parser.add_argument("--scunet-models-path", type=str, help="Path to directory wi
parser.add_argument("--swinir-models-path", type=str, help="Path to directory with SwinIR model file(s).", default=os.path.join(models_path, 'SwinIR'))
parser.add_argument("--ldsr-models-path", type=str, help="Path to directory with LDSR model file(s).", default=os.path.join(models_path, 'LDSR'))
parser.add_argument("--xformers", action='store_true', help="enable xformers for cross attention layers")
+parser.add_argument("--force-enable-xformers", action='store_true', help="enable xformers for cross attention layers regardless of whether the checking code thinks you can run it; do not make bug reports if this fails to work")
parser.add_argument("--deepdanbooru", action='store_true', help="enable deepdanbooru interrogator")
parser.add_argument("--opt-split-attention", action='store_true', help="force-enables cross-attention layer optimization. By default, it's on for torch.cuda and off for other torch devices.")
parser.add_argument("--disable-opt-split-attention", action='store_true', help="force-disables cross-attention layer optimization")