From 55a12f2c683b2ecfa4fc8b4015462ad2798abda5 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Tue, 16 May 2023 16:48:51 +0200
Subject: Fix LoRA training with DAdan

---
 training/functional.py | 4 ++--
 training/sampler.py    | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

(limited to 'training')

diff --git a/training/functional.py b/training/functional.py
index 10560e5..fd3f9f4 100644
--- a/training/functional.py
+++ b/training/functional.py
@@ -710,8 +710,8 @@ def train(
         vae = torch.compile(vae, backend='hidet')
 
     if compile_unet:
-        # unet = torch.compile(unet, backend='hidet')
-        unet = torch.compile(unet, mode="reduce-overhead")
+        unet = torch.compile(unet, backend='hidet')
+        # unet = torch.compile(unet, mode="reduce-overhead")
 
     callbacks = strategy.callbacks(
         accelerator=accelerator,
diff --git a/training/sampler.py b/training/sampler.py
index 8afe255..bdb3e90 100644
--- a/training/sampler.py
+++ b/training/sampler.py
@@ -129,7 +129,7 @@ class LossSecondMomentResampler(LossAwareSampler):
         self._loss_history = np.zeros(
             [self.num_timesteps, history_per_term], dtype=np.float64
         )
-        self._loss_counts = np.zeros([self.num_timesteps], dtype=np.int)
+        self._loss_counts = np.zeros([self.num_timesteps], dtype=int)
 
     def weights(self):
         if not self._warmed_up():
-- 
cgit v1.2.3-70-g09d2
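
Note (not part of the patch): the functional.py hunk switches the UNet back to the 'hidet' torch.compile backend, which is only available when the hidet package is installed, while the sampler.py hunk works around NumPy 1.24 removing the long-deprecated np.int alias, so np.zeros(..., dtype=np.int) now raises AttributeError. A minimal sketch of the dtype fix follows; the array sizes are illustrative and not the values used in training.

import numpy as np

num_timesteps = 1000       # illustrative size
history_per_term = 10      # illustrative size

# On NumPy >= 1.24, dtype=np.int raises AttributeError because the alias
# was removed; the builtin int (or an explicit np.int64) works everywhere.
loss_history = np.zeros([num_timesteps, history_per_term], dtype=np.float64)
loss_counts = np.zeros([num_timesteps], dtype=int)  # was: dtype=np.int

print(loss_counts.dtype)   # platform default integer, e.g. int64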