From 55a12f2c683b2ecfa4fc8b4015462ad2798abda5 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Tue, 16 May 2023 16:48:51 +0200
Subject: Fix LoRA training with DAdan

---
 training/functional.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/training/functional.py b/training/functional.py
index 10560e5..fd3f9f4 100644
--- a/training/functional.py
+++ b/training/functional.py
@@ -710,8 +710,8 @@ def train(
         vae = torch.compile(vae, backend='hidet')
 
     if compile_unet:
-        # unet = torch.compile(unet, backend='hidet')
-        unet = torch.compile(unet, mode="reduce-overhead")
+        unet = torch.compile(unet, backend='hidet')
+        # unet = torch.compile(unet, mode="reduce-overhead")
 
     callbacks = strategy.callbacks(
         accelerator=accelerator,
--
cgit v1.2.3-54-g00ecf
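For context, a minimal standalone sketch of the behavior the patched branch restores. The `compile_unet` flag and `unet` name mirror `training/functional.py`, but the helper function itself is hypothetical and not part of the repo. Using `backend="hidet"` with `torch.compile` assumes the `hidet` package is installed, since it registers itself as a TorchDynamo backend:

import torch
import torch.nn as nn

def maybe_compile_unet(unet: nn.Module, compile_unet: bool) -> nn.Module:
    # Hypothetical helper mirroring the patched branch: when enabled, compile
    # the UNet with the hidet TorchDynamo backend (requires `pip install hidet`)
    # instead of the default inductor backend in "reduce-overhead" mode.
    if compile_unet:
        unet = torch.compile(unet, backend="hidet")
        # Previous behavior, now commented out in the patch:
        # unet = torch.compile(unet, mode="reduce-overhead")
    return unet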