path: root/train_dreambooth.py
author    Volpeon <git@volpeon.ink>  2023-03-31 14:54:15 +0200
committer Volpeon <git@volpeon.ink>  2023-03-31 14:54:15 +0200
commit    5acae38f9b995fbaeb42a1504cce88bd18154f12 (patch)
tree      28abdb148fc133782fb5ee55b157cf1b12327c9d /train_dreambooth.py
parent    Fix (diff)
download  textual-inversion-diff-5acae38f9b995fbaeb42a1504cce88bd18154f12.tar.gz
          textual-inversion-diff-5acae38f9b995fbaeb42a1504cce88bd18154f12.tar.bz2
          textual-inversion-diff-5acae38f9b995fbaeb42a1504cce88bd18154f12.zip
Fix
Diffstat (limited to 'train_dreambooth.py')
-rw-r--r--  train_dreambooth.py | 4
1 file changed, 2 insertions, 2 deletions
diff --git a/train_dreambooth.py b/train_dreambooth.py
index d2e60ec..0634376 100644
--- a/train_dreambooth.py
+++ b/train_dreambooth.py
@@ -593,7 +593,6 @@ def main():
         dropout=args.tag_dropout,
         shuffle=not args.no_tag_shuffle,
         template_key=args.train_data_template,
-        placeholder_tokens=args.placeholder_tokens,
         valid_set_size=args.valid_set_size,
         train_set_pad=args.train_set_pad,
         valid_set_pad=args.valid_set_pad,
@@ -604,9 +603,10 @@ def main():
     datamodule.setup()

     num_train_epochs = args.num_train_epochs
+    sample_frequency = args.sample_frequency
     if num_train_epochs is None:
         num_train_epochs = math.ceil(args.num_train_steps / len(datamodule.train_dataset))
-        sample_frequency = math.ceil(num_train_epochs * (args.sample_frequency / args.num_train_steps))
+        sample_frequency = math.ceil(num_train_epochs * (sample_frequency / args.num_train_steps))

     params_to_optimize = (unet.parameters(), )
     if args.train_text_encoder_epochs != 0:
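For context, a minimal standalone sketch of the schedule logic as it reads after this commit: sample_frequency is now read from args up front, so it stays defined even when num_train_epochs is passed explicitly; when no epoch count is given, epochs are derived from the requested step count and the step-based sample_frequency is rescaled into epochs. The helper name resolve_schedule and the example numbers below are hypothetical, not part of the repository.

import math

def resolve_schedule(num_train_epochs, num_train_steps, sample_frequency, dataset_len):
    # Mirrors the post-commit control flow (hypothetical helper, not repo code).
    if num_train_epochs is None:
        # Derive the epoch count from the requested number of optimizer steps.
        num_train_epochs = math.ceil(num_train_steps / dataset_len)
        # Convert the step-based sampling interval into an epoch-based one.
        sample_frequency = math.ceil(num_train_epochs * (sample_frequency / num_train_steps))
    return num_train_epochs, sample_frequency

# Example: 10,000 steps over a 500-image dataset, sampling every 1,000 steps
# -> 20 epochs, sampling every 2 epochs.
print(resolve_schedule(None, 10_000, 1_000, 500))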