From 11e6f8f88483e6cfdccd66ad758ae1dfcfc0283b Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Sun, 2 Apr 2023 08:42:33 +0200
Subject: Lora: Only register params with grad to optimizer

---
 training/strategy/lora.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/training/strategy/lora.py b/training/strategy/lora.py
index 8905171..209785a 100644
--- a/training/strategy/lora.py
+++ b/training/strategy/lora.py
@@ -139,10 +139,6 @@ def lora_prepare(
     train_dataloader: DataLoader,
     val_dataloader: Optional[DataLoader],
     lr_scheduler: torch.optim.lr_scheduler._LRScheduler,
-    lora_rank: int = 4,
-    lora_alpha: int = 32,
-    lora_dropout: float = 0,
-    lora_bias: str = "none",
     **kwargs
 ):
     return accelerator.prepare(text_encoder, unet, optimizer, train_dataloader, val_dataloader, lr_scheduler) + ({},)
--
cgit v1.2.3-54-g00ecf
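
For context, a minimal sketch of the idea named in the commit subject: build the optimizer from only those parameters that require gradients, so frozen (non-LoRA) weights are never registered with it. This is an illustrative assumption, not code from this repository; the make_optimizer name, the unet/text_encoder arguments, and the AdamW settings are hypothetical.

    import itertools

    import torch

    def make_optimizer(
        unet: torch.nn.Module,
        text_encoder: torch.nn.Module,
        lr: float = 1e-4,
    ) -> torch.optim.Optimizer:
        # Collect only the parameters that will actually receive gradients,
        # e.g. the injected LoRA weights, and skip everything that is frozen.
        trainable_params = [
            p
            for p in itertools.chain(unet.parameters(), text_encoder.parameters())
            if p.requires_grad
        ]
        # Hand only the trainable parameters to the optimizer.
        return torch.optim.AdamW(trainable_params, lr=lr)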