From 16b92605a59d59c65789c89b54bb97da51908056 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Tue, 21 Feb 2023 09:09:50 +0100
Subject: Embedding normalization: Ignore tensors with grad = 0

---
 train_lora.py | 11 ++---------
 1 file changed, 2 insertions(+), 9 deletions(-)

(limited to 'train_lora.py')

diff --git a/train_lora.py b/train_lora.py
index db5330a..a06591d 100644
--- a/train_lora.py
+++ b/train_lora.py
@@ -248,7 +248,7 @@ def parse_args():
         "--optimizer",
         type=str,
         default="adam",
-        help='Optimizer to use ["adam", "adam8bit", "lion"]'
+        help='Optimizer to use ["adam", "adam8bit"]'
     )
     parser.add_argument(
         "--adam_beta1",
@@ -419,7 +419,7 @@ def main():
     save_args(output_dir, args)
 
     tokenizer, text_encoder, vae, unet, noise_scheduler, sample_scheduler, embeddings = get_models(
-        args.pretrained_model_name_or_path, noise_scheduler="deis")
+        args.pretrained_model_name_or_path)
 
     vae.enable_slicing()
     vae.set_use_memory_efficient_attention_xformers(True)
@@ -488,13 +488,6 @@ def main():
             eps=args.adam_epsilon,
             amsgrad=args.adam_amsgrad,
         )
-    elif args.optimizer == 'lion':
-        try:
-            from lion_pytorch import Lion
-        except ImportError:
-            raise ImportError("To use Lion, please install the lion_pytorch library: `pip install lion_pytorch`.")
-
-        create_optimizer = partial(Lion, use_triton=True)
     else:
         raise ValueError(f"Unknown --optimizer \"{args.optimizer}\"")
 
-- 
cgit v1.2.3-54-g00ecf
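
Note: the hunks above only touch the optimizer choices and the get_models() call; the embedding-normalization change named in the subject line is not shown here. As context only, a minimal sketch of what "ignore tensors with grad = 0" during embedding normalization could mean follows. The function name, the target_norm argument, and the use of a plain torch.nn.Embedding are illustrative assumptions, not code from this repository.

    import torch

    def normalize_embeddings(embedding: torch.nn.Embedding, target_norm: float = 1.0):
        # Sketch under assumptions: rescale embedding rows to a target norm,
        # skipping rows whose gradient is all zeros (tokens the current step
        # never touched). Names and default value are hypothetical.
        grad = embedding.weight.grad
        if grad is None:
            return
        with torch.no_grad():
            # A row with an all-zero gradient was not updated this step; ignore it.
            touched = grad.abs().sum(dim=1) != 0
            rows = embedding.weight[touched]
            norms = rows.norm(dim=1, keepdim=True).clamp(min=1e-12)
            embedding.weight[touched] = rows * (target_norm / norms)

In such a setup the call would sit after optimizer.step() and before optimizer.zero_grad(), so the stored gradient still indicates which embedding rows the step actually modified.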