author	AUTOMATIC1111 <16777216c@gmail.com>	2023-01-04 14:09:13 +0300
committer	GitHub <noreply@github.com>	2023-01-04 14:09:13 +0300
commit	7bbd984dda22b1392b34ca5d48b7a412216e047b (patch)
tree	44f23ebb82ac6c9f595f00c1beb717049447ee96 /modules/shared.py
parent	545ae8cb1c17a038a5bfd126546acf4b9cab334c (diff)
parent	bddebe09edeb6a18f2c06986d5658a7be3a563ea (diff)
Merge pull request #6253 from Shondoit/ti-optim
Save Optimizer next to TI embedding
Diffstat (limited to 'modules/shared.py')
-rw-r--r--	modules/shared.py	2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/shared.py b/modules/shared.py
index 6a217865..9c9fd857 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -356,7 +356,7 @@ options_templates.update(options_section(('system', "System"), {
options_templates.update(options_section(('training', "Training"), {
"unload_models_when_training": OptionInfo(False, "Move VAE and CLIP to RAM when training if possible. Saves VRAM."),
"pin_memory": OptionInfo(False, "Turn on pin_memory for DataLoader. Makes training slightly faster but can increase memory usage."),
- "save_optimizer_state": OptionInfo(False, "Saves Optimizer state as separate *.optim file. Training can be resumed with HN itself and matching optim file."),
+ "save_optimizer_state": OptionInfo(False, "Saves Optimizer state as separate *.optim file. Training of embedding or HN can be resumed with the matching optim file."),
"dataset_filename_word_regex": OptionInfo("", "Filename word regex"),
"dataset_filename_join_string": OptionInfo(" ", "Filename join string"),
"training_image_repeats_per_epoch": OptionInfo(1, "Number of repeats for a single input image per epoch; used only for displaying epoch number", gr.Number, {"precision": 0}),