From 8ea42d6ce516b7d0c43fc7a1e3d5e9db33d72c68 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Tue, 16 May 2023 09:25:05 +0200
Subject: LoRA: Apply to out layers as well

---
 train_lora.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/train_lora.py b/train_lora.py
index 167b17a..70f0dc8 100644
--- a/train_lora.py
+++ b/train_lora.py
@@ -32,8 +32,10 @@ from training.util import AverageMeter, save_args
 from util.files import load_config, load_embeddings_from_dir
 
 # https://github.com/huggingface/peft/blob/main/examples/lora_dreambooth/train_dreambooth.py
-UNET_TARGET_MODULES = ["to_q", "to_v", "query", "value"]
-TEXT_ENCODER_TARGET_MODULES = ["q_proj", "v_proj"]
+UNET_TARGET_MODULES_ORIG = ["to_q", "to_v", "query", "value"]
+UNET_TARGET_MODULES = UNET_TARGET_MODULES_ORIG + ["to_out.0"]  # ["to_k", "key"]
+TEXT_ENCODER_TARGET_MODULES_ORIG = ["q_proj", "v_proj"]
+TEXT_ENCODER_TARGET_MODULES = TEXT_ENCODER_TARGET_MODULES_ORIG + ["out_proj"]  # ["k_proj"]
 
 TEXT_ENCODER_TARGET_MODULES_WITH_EMBEDDING = TEXT_ENCODER_TARGET_MODULES + ["token_embedding"]
-- 
cgit v1.2.3-70-g09d2
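
For context, a minimal sketch of how module lists like UNET_TARGET_MODULES are typically fed into peft, following the linked lora_dreambooth example. The wiring below is an assumption, since the patch only touches the constants and not the LoRA setup in train_lora.py; the model id, rank, and alpha are placeholders.

    # Hypothetical usage sketch (not part of the patch): wrap the UNet with LoRA
    # adapters on the listed attention projections, now including the out layers.
    from diffusers import UNet2DConditionModel
    from peft import LoraConfig, get_peft_model

    UNET_TARGET_MODULES = ["to_q", "to_v", "query", "value", "to_out.0"]

    # Placeholder model id; train_lora.py presumably loads its own checkpoint.
    unet = UNet2DConditionModel.from_pretrained(
        "runwayml/stable-diffusion-v1-5", subfolder="unet"
    )

    unet_lora_config = LoraConfig(
        r=8,                                 # placeholder rank
        lora_alpha=32,                       # placeholder scaling
        target_modules=UNET_TARGET_MODULES,  # attention q/v and output projections
        lora_dropout=0.0,
        bias="none",
    )
    unet = get_peft_model(unet, unet_lora_config)  # only LoRA weights stay trainable

The text encoder would be wrapped the same way with TEXT_ENCODER_TARGET_MODULES (or the _WITH_EMBEDDING variant when the token embedding is also trained).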