author	AUTOMATIC1111 <16777216c@gmail.com>	2023-12-16 10:22:51 +0300
committer	GitHub <noreply@github.com>	2023-12-16 10:22:51 +0300
commit	c121f8c31587a21020e8670664977f6f76e68905 (patch)
tree	5db19664111d4264d4b018bee6557d7d6b0ec1c4 /modules/processing.py
parent	60186c7b9d6034ff08f4fe9e213a495b5321302d (diff)
parent	8edb9144cc76b39f3d68c0407b3bb990809d1b03 (diff)
Merge pull request #14031 from AUTOMATIC1111/test-fp8
A big improvement to the dtype casting system, with an fp8 storage type and manual cast
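
The sketch below is not part of this PR; it is a minimal illustration of the fp8-storage-plus-manual-cast idea the PR title refers to, assuming PyTorch >= 2.1 (which provides torch.float8_e4m3fn). The class name Fp8StoredLinear is hypothetical.

import torch
import torch.nn as nn
import torch.nn.functional as F

class Fp8StoredLinear(nn.Module):
    # Hypothetical example, not the webui implementation: keep the weight in
    # fp8 to cut its memory footprint, and cast it back to the activation
    # dtype on every forward pass ("manual cast").
    def __init__(self, linear: nn.Linear):
        super().__init__()
        self.weight = nn.Parameter(linear.weight.detach().to(torch.float8_e4m3fn),
                                   requires_grad=False)
        self.bias = linear.bias

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # fp8 matmul kernels are not generally available, so compute in the
        # activation dtype (fp16/bf16) after casting the stored weight up.
        w = self.weight.to(x.dtype)
        b = self.bias.to(x.dtype) if self.bias is not None else None
        return F.linear(x, w, b)

# Usage: wrap an existing fp16 layer and run it with fp16 activations.
# layer = Fp8StoredLinear(nn.Linear(320, 320).half())
# y = layer(torch.randn(1, 320, dtype=torch.float16))
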
Diffstat (limited to 'modules/processing.py')
-rw-r--r--	modules/processing.py	2
1 file changed, 2 insertions(+), 0 deletions(-)
diff --git a/modules/processing.py b/modules/processing.py
index bea01ec6..179f2c0f 100644
--- a/modules/processing.py
+++ b/modules/processing.py
@@ -688,6 +688,8 @@ def create_infotext(p, all_prompts, all_seeds, all_subseeds, comments=None, iter
"Size": f"{p.width}x{p.height}",
"Model hash": p.sd_model_hash if opts.add_model_hash_to_info else None,
"Model": p.sd_model_name if opts.add_model_name_to_info else None,
+ "FP8 weight": opts.fp8_storage if devices.fp8 else None,
+ "Cache FP16 weight for LoRA": opts.cache_fp16_weight if devices.fp8 else None,
"VAE hash": p.sd_vae_hash if opts.add_vae_hash_to_info else None,
"VAE": p.sd_vae_name if opts.add_vae_name_to_info else None,
"Variation seed": (None if p.subseed_strength == 0 else (p.all_subseeds[0] if use_main_prompt else all_subseeds[index])),