author    Volpeon <git@volpeon.ink>  2023-03-28 16:28:44 +0200
committer Volpeon <git@volpeon.ink>  2023-03-28 16:28:44 +0200
commit    b59611506d2eac1b280ada07db0bf9de15b41a26 (patch)
tree      3f4a1e49f0db72443e45bc64d2c5b9d99ce055fc
parent    Support num_train_steps arg again (diff)
Fix
 train_dreambooth.py | 2 +-
 train_lora.py       | 2 +-
 train_ti.py         | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
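The change swaps the operands in the epoch calculation: when only the requested number of training steps is given, the number of epochs should be the steps divided by the dataset length, not the reverse. A minimal sketch of the corrected arithmetic (the concrete numbers below are made up for illustration; in the scripts the values come from args.num_train_steps and len(datamodule.train_dataset)):

    import math

    num_train_steps = 1000   # requested optimizer steps (hypothetical)
    dataset_len = 300        # samples per epoch (hypothetical)

    # Before the fix: ceil(300 / 1000) = 1 epoch, far fewer steps than requested.
    # After the fix:  ceil(1000 / 300) = 4 epochs, covering at least 1000 samples.
    num_train_epochs = math.ceil(num_train_steps / dataset_len)
    print(num_train_epochs)  # 4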
diff --git a/train_dreambooth.py b/train_dreambooth.py
index acb8287..e3c8525 100644
--- a/train_dreambooth.py
+++ b/train_dreambooth.py
@@ -598,7 +598,7 @@ def main():
     num_train_epochs = args.num_train_epochs
 
     if num_train_epochs is None:
-        num_train_epochs = math.ceil(len(datamodule.train_dataset) / args.num_train_steps)
+        num_train_epochs = math.ceil(args.num_train_steps / len(datamodule.train_dataset))
 
     params_to_optimize = (unet.parameters(), )
     if args.train_text_encoder_epochs != 0:
diff --git a/train_lora.py b/train_lora.py
index a9c6e52..6f8644b 100644
--- a/train_lora.py
+++ b/train_lora.py
@@ -630,7 +630,7 @@ def main():
     num_train_epochs = args.num_train_epochs
 
     if num_train_epochs is None:
-        num_train_epochs = math.ceil(len(datamodule.train_dataset) / args.num_train_steps)
+        num_train_epochs = math.ceil(args.num_train_steps / len(datamodule.train_dataset))
 
     optimizer = create_optimizer(
         itertools.chain(
diff --git a/train_ti.py b/train_ti.py
index 7bcc72f..9c4ad93 100644
--- a/train_ti.py
+++ b/train_ti.py
@@ -761,7 +761,7 @@ def main():
     num_train_epochs = args.num_train_epochs
 
     if num_train_epochs is None:
-        num_train_epochs = math.ceil(len(datamodule.train_dataset) / args.num_train_steps)
+        num_train_epochs = math.ceil(args.num_train_steps / len(datamodule.train_dataset))
 
     optimizer = create_optimizer(
         text_encoder.text_model.embeddings.temp_token_embedding.parameters(),