author    AUTOMATIC <16777216c@gmail.com>    2023-01-06 08:52:06 +0300
committer AUTOMATIC <16777216c@gmail.com>    2023-01-06 08:52:06 +0300
commit    683287d87f6401083a8d63eedc00ca7410214ca1 (patch)
tree      2f76affb4b41044982a6108ba30dddc27f2891ac /modules/shared.py
parent    88e01b237e60730338823ac4f11972a98d698ce7 (diff)
rework saving training params to file #6372
Diffstat (limited to 'modules/shared.py')
-rw-r--r--  modules/shared.py  2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/shared.py b/modules/shared.py
index f0e10b35..57e489d0 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -362,7 +362,7 @@ options_templates.update(options_section(('training', "Training"), {
"unload_models_when_training": OptionInfo(False, "Move VAE and CLIP to RAM when training if possible. Saves VRAM."),
"pin_memory": OptionInfo(False, "Turn on pin_memory for DataLoader. Makes training slightly faster but can increase memory usage."),
"save_optimizer_state": OptionInfo(False, "Saves Optimizer state as separate *.optim file. Training of embedding or HN can be resumed with the matching optim file."),
- "save_training_settings_to_txt": OptionInfo(False, "Save textual inversion and hypernet settings to a text file whenever training starts."),
+ "save_training_settings_to_txt": OptionInfo(True, "Save textual inversion and hypernet settings to a text file whenever training starts."),
"dataset_filename_word_regex": OptionInfo("", "Filename word regex"),
"dataset_filename_join_string": OptionInfo(" ", "Filename join string"),
"training_image_repeats_per_epoch": OptionInfo(1, "Number of repeats for a single input image per epoch; used only for displaying epoch number", gr.Number, {"precision": 0}),