From 01eee0cb24f52ca78761b78917959e1c247eae94 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Sat, 1 Apr 2023 12:35:43 +0200
Subject: Add support for Adafactor, add TI initializer noise

---
 training/optimization.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/training/optimization.py b/training/optimization.py
index 59ca950..53d0a6d 100644
--- a/training/optimization.py
+++ b/training/optimization.py
@@ -6,6 +6,7 @@ import torch
 from torch.optim.lr_scheduler import LambdaLR
 
 from diffusers.optimization import get_scheduler as get_scheduler_, get_cosine_with_hard_restarts_schedule_with_warmup
+import transformers
 
 
 class OneCyclePhase(NamedTuple):
@@ -148,6 +149,8 @@ def get_scheduler(
             num_training_steps=num_training_steps,
             num_cycles=cycles,
         )
+    elif id == "adafactor":
+        lr_scheduler = transformers.optimization.AdafactorSchedule(optimizer, min_lr)
     else:
         lr_scheduler = get_scheduler_(
             id,
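
Below is a minimal usage sketch (not part of the commit) of how the new "adafactor" scheduler id is presumably meant to be used: AdafactorSchedule is only a proxy that reports the learning rate computed internally by the transformers Adafactor optimizer, so it has to be paired with that optimizer. The model, loss, and min_lr value here are illustrative placeholders, not values from this repository.

# Minimal sketch, assuming the "adafactor" branch is paired with
# transformers.optimization.Adafactor. The second argument to
# AdafactorSchedule (min_lr in the patch) is passed as initial_lr, the
# placeholder learning rate reported before the first optimizer step.
import torch
import transformers

model = torch.nn.Linear(4, 4)   # stand-in for the real trained parameters
min_lr = 1e-6                   # assumed value for the repo's min_lr setting

optimizer = transformers.optimization.Adafactor(
    model.parameters(),
    lr=None,                    # required when relative_step=True
    scale_parameter=True,
    relative_step=True,         # let Adafactor derive its own step size
    warmup_init=True,
)
lr_scheduler = transformers.optimization.AdafactorSchedule(optimizer, min_lr)

# Usual train-step pattern; the scheduler just mirrors Adafactor's internal lr.
loss = model(torch.randn(2, 4)).sum()
loss.backward()
optimizer.step()
lr_scheduler.step()
optimizer.zero_grad()

print(lr_scheduler.get_last_lr())   # learning rate Adafactor actually used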