author    Shondoit <shondoit@gmail.com>  2023-01-03 10:26:37 +0100
committer Shondoit <shondoit@gmail.com>  2023-01-03 13:30:24 +0100
commit    bddebe09edeb6a18f2c06986d5658a7be3a563ea (patch)
tree      0b661b77cc1c4a3684c2c5f02993aa3ed933f6e8 /modules/shared.py
parent    c0ee1488702d5a6ae35fbf7e0422f9f685394920 (diff)
Save Optimizer next to TI embedding
Also add a check to load only .pt and .bin files as embeddings, since .optim files are now written to the same directory.
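
A minimal sketch of such an extension check, for illustration only; the helper name is hypothetical and the actual loader in modules/textual_inversion is structured differently:

    import os

    def is_embedding_file(path):
        # Treat only .pt and .bin files as embeddings, skipping the
        # .optim files that are now saved alongside them.
        # (Hypothetical helper; names are not from this commit.)
        _, ext = os.path.splitext(path)
        return ext.lower() in ('.pt', '.bin')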
Diffstat (limited to 'modules/shared.py')
-rw-r--r--  modules/shared.py  2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/shared.py b/modules/shared.py
index 23657a93..c541d18c 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -355,7 +355,7 @@ options_templates.update(options_section(('system', "System"), {
options_templates.update(options_section(('training', "Training"), {
"unload_models_when_training": OptionInfo(False, "Move VAE and CLIP to RAM when training if possible. Saves VRAM."),
"pin_memory": OptionInfo(False, "Turn on pin_memory for DataLoader. Makes training slightly faster but can increase memory usage."),
- "save_optimizer_state": OptionInfo(False, "Saves Optimizer state as separate *.optim file. Training can be resumed with HN itself and matching optim file."),
+ "save_optimizer_state": OptionInfo(False, "Saves Optimizer state as separate *.optim file. Training of embedding or HN can be resumed with the matching optim file."),
"dataset_filename_word_regex": OptionInfo("", "Filename word regex"),
"dataset_filename_join_string": OptionInfo(" ", "Filename join string"),
"training_image_repeats_per_epoch": OptionInfo(1, "Number of repeats for a single input image per epoch; used only for displaying epoch number", gr.Number, {"precision": 0}),