path: root/train_lora.py
author    Volpeon <git@volpeon.ink>  2023-02-21 11:50:11 +0100
committer Volpeon <git@volpeon.ink>  2023-02-21 11:50:11 +0100
commit    9d6252e63bac241e5c6191eb47adb51b84a5d782 (patch)
tree      6cb649510b48ca33419af3721e630f1c06bf1ae2 /train_lora.py
parent    Embedding normalization: Ignore tensors with grad = 0 (diff)
Don't rely on Accelerate for gradient accumulation
Diffstat (limited to 'train_lora.py')
-rw-r--r--  train_lora.py  2
1 file changed, 1 insertion, 1 deletion
diff --git a/train_lora.py b/train_lora.py
index a06591d..d7c2de0 100644
--- a/train_lora.py
+++ b/train_lora.py
@@ -399,7 +399,6 @@ def main():
     accelerator = Accelerator(
         log_with=LoggerType.TENSORBOARD,
         logging_dir=f"{output_dir}",
-        gradient_accumulation_steps=args.gradient_accumulation_steps,
         mixed_precision=args.mixed_precision
     )
 
@@ -561,6 +560,7 @@ def main():
         optimizer=optimizer,
         lr_scheduler=lr_scheduler,
         num_train_epochs=args.num_train_epochs,
+        gradient_accumulation_steps=args.gradient_accumulation_steps,
         sample_frequency=args.sample_frequency,
         # --
         tokenizer=tokenizer,
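
With gradient_accumulation_steps removed from the Accelerator and passed to the trainer instead, accumulation is presumably done by hand in the training loop. Below is a minimal sketch of manual gradient accumulation in plain PyTorch, without Accelerate's accumulate() context; the model, optimizer, and batches are hypothetical stand-ins, not names from train_lora.py:

import torch
import torch.nn as nn

# Hypothetical toy setup standing in for the real LoRA model and dataloader.
model = nn.Linear(4, 1)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
loss_fn = nn.MSELoss()
batches = [(torch.randn(8, 4), torch.randn(8, 1)) for _ in range(16)]

gradient_accumulation_steps = 4  # mirrors args.gradient_accumulation_steps

optimizer.zero_grad()
for step, (inputs, targets) in enumerate(batches):
    loss = loss_fn(model(inputs), targets)
    # Scale the loss so gradients accumulate to the mean over the
    # effective batch rather than the sum.
    (loss / gradient_accumulation_steps).backward()

    # Step the optimizer and reset gradients only once every N micro-batches.
    if (step + 1) % gradient_accumulation_steps == 0:
        optimizer.step()
        optimizer.zero_grad()

Scaling each micro-batch loss by 1/N keeps the accumulated gradient equal to the gradient of the mean loss over the effective batch, so the learning rate does not need to be adjusted when the accumulation count changes.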