path: root/training/functional.py
author     Volpeon <git@volpeon.ink>  2023-05-16 16:48:51 +0200
committer  Volpeon <git@volpeon.ink>  2023-05-16 16:48:51 +0200
commit     55a12f2c683b2ecfa4fc8b4015462ad2798abda5 (patch)
tree       feeb3f9a041466e773bb5921cbf0adb208d60a49 /training/functional.py
parent     Avoid model recompilation due to varying prompt lengths (diff)
Fix LoRA training with DAdan
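
("DAdan" here presumably refers to the DAdaptAdan optimizer from the
dadaptation package, which estimates its own step size via D-Adaptation.
The sketch below shows how such an optimizer is typically constructed; it
is an illustration under that assumption, not code from this repository,
and `lora_parameters` is a hypothetical name.)

    # Hedged sketch: assumes "DAdan" means dadaptation.DAdaptAdan.
    # `lora_parameters` is a hypothetical iterable of trainable LoRA weights.
    from dadaptation import DAdaptAdan

    optimizer = DAdaptAdan(
        lora_parameters,
        lr=1.0,  # D-Adaptation derives the step size; lr acts as a multiplier
    )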
Diffstat (limited to 'training/functional.py')
-rw-r--r--  training/functional.py  4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/training/functional.py b/training/functional.py
index 10560e5..fd3f9f4 100644
--- a/training/functional.py
+++ b/training/functional.py
@@ -710,8 +710,8 @@ def train(
         vae = torch.compile(vae, backend='hidet')

     if compile_unet:
-        # unet = torch.compile(unet, backend='hidet')
-        unet = torch.compile(unet, mode="reduce-overhead")
+        unet = torch.compile(unet, backend='hidet')
+        # unet = torch.compile(unet, mode="reduce-overhead")

     callbacks = strategy.callbacks(
         accelerator=accelerator,
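
For context: torch.compile defaults to the TorchInductor backend, and
mode="reduce-overhead" enables CUDA-graph capture there, while
backend='hidet' hands code generation to the separately installed Hidet
compiler. The patch switches the UNet back to the hidet path, which per the
commit message restores LoRA training with DAdan. A minimal, self-contained
sketch of the two variants, using a stand-in module rather than this repo's
UNet (illustrative only):

    import torch

    model = torch.nn.Linear(16, 16)  # stand-in for the UNet

    # Variant disabled by this patch: default Inductor backend with
    # CUDA-graph capture to cut per-call launch overhead.
    compiled = torch.compile(model, mode="reduce-overhead")

    # Variant enabled by this patch: the Hidet backend
    # (requires `pip install hidet`).
    compiled = torch.compile(model, backend="hidet")

    out = compiled(torch.randn(4, 16))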