path: root/train_ti.py
author     Volpeon <git@volpeon.ink>  2023-03-21 13:46:36 +0100
committer  Volpeon <git@volpeon.ink>  2023-03-21 13:46:36 +0100
commit     f5e0e98f6df9260a93fb650a0b97c85eb87b0fd3 (patch)
tree       0d061f5fd8950d7ca7e0198731ee58980859dd18 /train_ti.py
parent     Restore min SNR (diff)
Fixed SNR weighting, re-enabled xformers
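The SNR-weighting half of this commit is not part of the train_ti.py hunk shown below (the diffstat is limited to this file). For context only, here is a minimal, hedged sketch of min-SNR-gamma loss weighting for epsilon-prediction diffusion training; the function name min_snr_weighted_mse and the arguments model_pred, target, timesteps, noise_scheduler, and gamma are illustrative assumptions, not this repository's code.

import torch
import torch.nn.functional as F


def min_snr_weighted_mse(model_pred, target, timesteps, noise_scheduler, gamma=5.0):
    # SNR(t) = alpha_bar_t / (1 - alpha_bar_t), taken from the scheduler's
    # cumulative alphas at the sampled timesteps.
    alphas_cumprod = noise_scheduler.alphas_cumprod.to(timesteps.device)[timesteps]
    snr = alphas_cumprod / (1.0 - alphas_cumprod)

    # Min-SNR weighting for epsilon prediction: min(SNR, gamma) / SNR.
    weight = torch.minimum(snr, torch.full_like(snr, gamma)) / snr

    # Per-sample MSE, then apply the per-timestep weight.
    loss = F.mse_loss(model_pred.float(), target.float(), reduction="none")
    loss = loss.mean(dim=list(range(1, loss.ndim))) * weight
    return loss.mean()

Under these assumptions, such a helper would replace a plain F.mse_loss call inside the training step.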
Diffstat (limited to 'train_ti.py')
-rw-r--r--  train_ti.py  4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/train_ti.py b/train_ti.py
index fd23517..2e92ae4 100644
--- a/train_ti.py
+++ b/train_ti.py
@@ -547,8 +547,8 @@ def main():
     tokenizer.set_dropout(args.vector_dropout)
 
     vae.enable_slicing()
-    # vae.set_use_memory_efficient_attention_xformers(True)
-    # unet.enable_xformers_memory_efficient_attention()
+    vae.set_use_memory_efficient_attention_xformers(True)
+    unet.enable_xformers_memory_efficient_attention()
     # unet = torch.compile(unet)
 
     if args.gradient_checkpointing:
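The two re-enabled calls are standard diffusers ModelMixin methods. As a minimal, self-contained sketch, assuming a diffusers AutoencoderKL and UNet2DConditionModel (the checkpoint ID below is a placeholder, not the model this project configures elsewhere in train_ti.py):

from diffusers import AutoencoderKL, UNet2DConditionModel
from diffusers.utils.import_utils import is_xformers_available

# Placeholder checkpoint for illustration only.
model_id = "stabilityai/stable-diffusion-2-1"
vae = AutoencoderKL.from_pretrained(model_id, subfolder="vae")
unet = UNet2DConditionModel.from_pretrained(model_id, subfolder="unet")

# Sliced VAE decoding trades a little speed for lower peak memory.
vae.enable_slicing()

# Only switch to xformers memory-efficient attention when the package is
# importable, mirroring the two lines re-enabled in this commit.
if is_xformers_available():
    vae.set_use_memory_efficient_attention_xformers(True)
    unet.enable_xformers_memory_efficient_attention()

# torch.compile stays commented out in the commit; with PyTorch >= 2.0 it
# could be enabled as:
# unet = torch.compile(unet)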