diff options
author | AUTOMATIC <16777216c@gmail.com> | 2023-01-23 16:01:53 +0300 |
---|---|---|
committer | AUTOMATIC <16777216c@gmail.com> | 2023-01-23 16:01:53 +0300 |
commit | 3fa482076a5f07d81d37d58a31b8c4fe3a740843 (patch) | |
tree | 4f31fe62e676bc662d56b1674cfcf4b78f4444e2 /modules/shared.py | |
parent | 194cbd065e4644e986889b78a5a949e075b610e8 (diff) | |
parent | 3262e825cc542ff634e6ba2e3a162eafdc6c1bba (diff) |
Merge remote-tracking branch 'takuma104/xformers-flash-attention'
Diffstat (limited to 'modules/shared.py')
-rw-r--r-- | modules/shared.py | 1 |
1 files changed, 1 insertions, 0 deletions
diff --git a/modules/shared.py b/modules/shared.py
index cb73bf31..a644c0be 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -57,6 +57,7 @@ parser.add_argument("--realesrgan-models-path", type=str, help="Path to director
 parser.add_argument("--clip-models-path", type=str, help="Path to directory with CLIP model file(s).", default=None)
 parser.add_argument("--xformers", action='store_true', help="enable xformers for cross attention layers")
 parser.add_argument("--force-enable-xformers", action='store_true', help="enable xformers for cross attention layers regardless of whether the checking code thinks you can run it; do not make bug reports if this fails to work")
+parser.add_argument("--xformers-flash-attention", action='store_true', help="enable xformers with Flash Attention to improve reproducibility (supported for SD2.x or variant only)")
 parser.add_argument("--deepdanbooru", action='store_true', help="does not do anything")
 parser.add_argument("--opt-split-attention", action='store_true', help="force-enables Doggettx's cross-attention layer optimization. By default, it's on for torch cuda.")
 parser.add_argument("--opt-sub-quad-attention", action='store_true', help="enable memory efficient sub-quadratic cross-attention layer optimization")
|