From eb6a92abda5893c975437026cdaf0ce0bfefe2a4 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Mon, 10 Apr 2023 10:34:12 +0200
Subject: Update

---
 train_lora.py | 66 ++++++++++++++++++++++++++++++++++++++++--------------------
 1 file changed, 44 insertions(+), 22 deletions(-)

(limited to 'train_lora.py')

diff --git a/train_lora.py b/train_lora.py
index e81742a..4bbc64e 100644
--- a/train_lora.py
+++ b/train_lora.py
@@ -198,6 +198,11 @@ def parse_args():
         default=None,
         help="The embeddings directory where Textual Inversion embeddings are stored.",
     )
+    parser.add_argument(
+        "--train_dir_embeddings",
+        action="store_true",
+        help="Train embeddings loaded from embeddings directory.",
+    )
     parser.add_argument(
         "--collection",
         type=str,
@@ -439,6 +444,12 @@ def parse_args():
         default=1,
         help="How often to save a checkpoint and sample image",
     )
+    parser.add_argument(
+        "--sample_num",
+        type=int,
+        default=None,
+        help="How often to save a checkpoint and sample image (in number of samples)",
+    )
     parser.add_argument(
         "--sample_image_size",
         type=int,
@@ -681,27 +692,36 @@ def main():
         embeddings.persist()
         print(f"Added {len(added_tokens)} aliases: {list(zip(alias_placeholder_tokens, added_tokens, alias_initializer_tokens, added_ids))}")
 
+    placeholder_token_ids = []
+
     if args.embeddings_dir is not None:
         embeddings_dir = Path(args.embeddings_dir)
         if not embeddings_dir.exists() or not embeddings_dir.is_dir():
             raise ValueError("--embeddings_dir must point to an existing directory")
 
         added_tokens, added_ids = load_embeddings_from_dir(tokenizer, embeddings, embeddings_dir)
-        embeddings.persist()
         print(f"Added {len(added_tokens)} tokens from embeddings dir: {list(zip(added_tokens, added_ids))}")
 
-    placeholder_token_ids, initializer_token_ids = add_placeholder_tokens(
-        tokenizer=tokenizer,
-        embeddings=embeddings,
-        placeholder_tokens=args.placeholder_tokens,
-        initializer_tokens=args.initializer_tokens,
-        num_vectors=args.num_vectors,
-        initializer_noise=args.initializer_noise,
-    )
-    stats = list(zip(
-        args.placeholder_tokens, placeholder_token_ids, args.initializer_tokens, initializer_token_ids
-    ))
-    print(f"Training embeddings: {stats}")
+        if args.train_dir_embeddings:
+            args.placeholder_tokens = added_tokens
+            placeholder_token_ids = added_ids
+            print("Training embeddings from embeddings dir")
+        else:
+            embeddings.persist()
+
+    if not args.train_dir_embeddings:
+        placeholder_token_ids, initializer_token_ids = add_placeholder_tokens(
+            tokenizer=tokenizer,
+            embeddings=embeddings,
+            placeholder_tokens=args.placeholder_tokens,
+            initializer_tokens=args.initializer_tokens,
+            num_vectors=args.num_vectors,
+            initializer_noise=args.initializer_noise,
+        )
+        stats = list(zip(
+            args.placeholder_tokens, placeholder_token_ids, args.initializer_tokens, initializer_token_ids
+        ))
+        print(f"Training embeddings: {stats}")
 
     if args.scale_lr:
         args.learning_rate_unet = (
@@ -897,6 +917,8 @@ def main():
             args.num_train_steps / len(lora_datamodule.train_dataset)
         ) * args.gradient_accumulation_steps
         lora_sample_frequency = math.ceil(num_train_epochs * (lora_sample_frequency / args.num_train_steps))
+        if args.sample_num is not None:
+            lora_sample_frequency = math.ceil(num_train_epochs / args.sample_num)
 
     params_to_optimize = []
     group_labels = []
@@ -930,15 +952,6 @@ def main():
         ]
         group_labels += ["unet", "text"]
 
-    lora_optimizer = create_optimizer(params_to_optimize)
-
-    lora_lr_scheduler = create_lr_scheduler(
-        gradient_accumulation_steps=args.gradient_accumulation_steps,
-        optimizer=lora_optimizer,
-        num_training_steps_per_epoch=len(lora_datamodule.train_dataloader),
-        train_epochs=num_train_epochs,
-    )
-
     training_iter = 0
 
     while True:
@@ -952,6 +965,15 @@ def main():
         print(f"============ LoRA cycle {training_iter} ============")
         print("")
 
+        lora_optimizer = create_optimizer(params_to_optimize)
+
+        lora_lr_scheduler = create_lr_scheduler(
+            gradient_accumulation_steps=args.gradient_accumulation_steps,
+            optimizer=lora_optimizer,
+            num_training_steps_per_epoch=len(lora_datamodule.train_dataloader),
+            train_epochs=num_train_epochs,
+        )
+
         lora_project = f"lora_{training_iter}"
         lora_checkpoint_output_dir = output_dir / lora_project / "model"
         lora_sample_output_dir = output_dir / lora_project / "samples"
--
cgit v1.2.3-54-g00ecf
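A minimal sketch of the sample-frequency arithmetic implied by the new --sample_num option; the epoch count and sample count below are assumed example values, not taken from the patch:

    import math

    # Assumed example values: a run of 100 epochs where --sample_num 25 was passed.
    num_train_epochs = 100
    sample_num = 25

    # Mirrors the added line in the patch: sample roughly `sample_num` times over
    # the whole run by sampling every ceil(epochs / sample_num) epochs.
    lora_sample_frequency = math.ceil(num_train_epochs / sample_num)
    print(lora_sample_frequency)  # -> 4, i.e. a sample every 4 epochs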