From 49e0a72e3c6c059abc3fb8b2dcbdda2553402018 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Sat, 8 Apr 2023 08:20:30 +0200
Subject: Fix

---
 training/strategy/lora.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/training/strategy/lora.py b/training/strategy/lora.py
index 89269c0..cfdc504 100644
--- a/training/strategy/lora.py
+++ b/training/strategy/lora.py
@@ -100,9 +100,8 @@ def lora_strategy_callbacks(

     @torch.no_grad()
     def on_after_optimize(w, lrs: dict[str, float]):
-        lr = lrs["emb"] or lrs["0"]
-
-        if use_emb_decay and w is not None:
+        if use_emb_decay and w is not None and "emb" in lrs:
+            lr = lrs["emb"]
             lambda_ = emb_decay * lr

             if lambda_ != 0:
--
cgit v1.2.3-54-g00ecf
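
For context, a minimal self-contained sketch of the corrected callback logic. The factory function below is hypothetical and only stands in for the enclosing lora_strategy_callbacks(); the names use_emb_decay, emb_decay, w, and lrs are taken from the hunk, and the decay step inside "if lambda_ != 0:" is not shown in the diff, so it is elided here.

import torch


def make_on_after_optimize(use_emb_decay: bool, emb_decay: float):
    # Hypothetical stand-in for the closure scope of lora_strategy_callbacks().
    @torch.no_grad()
    def on_after_optimize(w, lrs: dict[str, float]):
        # Only apply embedding decay when it is enabled, an embedding tensor
        # was passed, and the optimizer actually reported an "emb" learning
        # rate. The removed code read lrs["emb"] or lrs["0"] unconditionally,
        # which raises a KeyError when no "emb" entry exists.
        if use_emb_decay and w is not None and "emb" in lrs:
            lr = lrs["emb"]
            lambda_ = emb_decay * lr

            if lambda_ != 0:
                # The actual decay applied to `w` lies outside this hunk.
                pass

    return on_after_optimize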