From 55a12f2c683b2ecfa4fc8b4015462ad2798abda5 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Tue, 16 May 2023 16:48:51 +0200
Subject: Fix LoRA training with DAdan

---
 train_lora.py | 58 +++++++++++++++++++++++++++-------------------------------
 1 file changed, 27 insertions(+), 31 deletions(-)

(limited to 'train_lora.py')

diff --git a/train_lora.py b/train_lora.py
index 12d7e72..c74dd8f 100644
--- a/train_lora.py
+++ b/train_lora.py
@@ -48,8 +48,8 @@ warnings.filterwarnings('ignore')
 torch.backends.cuda.matmul.allow_tf32 = True
 torch.backends.cudnn.benchmark = True
 
-torch._dynamo.config.log_level = logging.WARNING
-# torch._dynamo.config.suppress_errors = True
+# torch._dynamo.config.log_level = logging.WARNING
+torch._dynamo.config.suppress_errors = True
 
 hidet.torch.dynamo_config.use_tensor_core(True)
 hidet.torch.dynamo_config.search_space(0)
@@ -1143,6 +1143,28 @@ def main():
     avg_loss_val = AverageMeter()
     avg_acc_val = AverageMeter()
 
+    params_to_optimize = [
+        {
+            "params": (
+                param
+                for param in unet.parameters()
+                if param.requires_grad
+            ),
+            "lr": learning_rate_unet,
+        },
+        {
+            "params": (
+                param
+                for param in text_encoder.parameters()
+                if param.requires_grad
+            ),
+            "lr": learning_rate_text,
+        }
+    ]
+    group_labels = ["unet", "text"]
+
+    lora_optimizer = create_optimizer(params_to_optimize)
+
     while True:
         if len(auto_cycles) != 0:
             response = auto_cycles.pop(0)
@@ -1182,35 +1204,9 @@ def main():
         print("")
         print(f"============ LoRA cycle {training_iter + 1}: {response} ============")
         print("")
-
-        params_to_optimize = []
-        group_labels = []
-
-        params_to_optimize.append({
-            "params": (
-                param
-                for param in unet.parameters()
-                if param.requires_grad
-            ),
-            "lr": learning_rate_unet,
-        })
-        group_labels.append("unet")
-
-        if training_iter < args.train_text_encoder_cycles:
-            params_to_optimize.append({
-                "params": (
-                    param
-                    for param in itertools.chain(
-                        text_encoder.text_model.encoder.parameters(),
-                        text_encoder.text_model.final_layer_norm.parameters(),
-                    )
-                    if param.requires_grad
-                ),
-                "lr": learning_rate_text,
-            })
-            group_labels.append("text")
-
-        lora_optimizer = create_optimizer(params_to_optimize)
+
+        for group, lr in zip(lora_optimizer.param_groups, [learning_rate_unet, learning_rate_text]):
+            group['lr'] = lr
 
         lora_lr_scheduler = create_lr_scheduler(
             lr_scheduler,
-- 
cgit v1.2.3-54-g00ecf
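
Note on the change: D-Adaptation optimizers ("DAdan" in the subject presumably
being the Adan variant, e.g. dadaptation.DAdaptAdan) estimate their own
effective step size from statistics accumulated across steps. Recreating the
optimizer at the start of every LoRA cycle, as the removed code did, throws
that estimate away, so each cycle restarts from the initial step size. The
commit instead builds the optimizer once before the cycle loop and only
rewrites the per-group learning rates each cycle. Below is a minimal runnable
sketch of that pattern; torch.optim.AdamW stands in for the D-Adaptation
optimizer and toy Linear modules stand in for the real UNet and text encoder,
while names like learning_rate_unet mirror the patch.

    # Sketch of the commit's pattern: create the optimizer once so its
    # state persists, then adjust only param_group learning rates per cycle.
    import torch

    unet = torch.nn.Linear(4, 4)           # stand-in for the real UNet
    text_encoder = torch.nn.Linear(4, 4)   # stand-in for the text encoder
    learning_rate_unet, learning_rate_text = 1e-4, 5e-5

    # Created once, before the cycle loop; for a D-Adaptation optimizer
    # this is what preserves the accumulated step-size estimate.
    lora_optimizer = torch.optim.AdamW([
        {"params": unet.parameters(), "lr": learning_rate_unet},
        {"params": text_encoder.parameters(), "lr": learning_rate_text},
    ])

    for cycle in range(3):
        # Per-cycle LR changes touch only param_groups, never optimizer state.
        for group, lr in zip(lora_optimizer.param_groups,
                             [learning_rate_unet, learning_rate_text]):
            group["lr"] = lr
        # ... training steps for this cycle ...

One side effect visible in the diff: the old train_text_encoder_cycles gate is
gone, so the text-encoder group is now always part of the optimizer; cycles
that should not train it would have to express that through its learning rate
instead.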