From 950f1f6bcbb1a767170cea590b828d8e3cdae882 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Fri, 23 Jun 2023 06:48:38 +0200
Subject: Update

---
 train_lora.py | 18 ------------------
 1 file changed, 18 deletions(-)

diff --git a/train_lora.py b/train_lora.py
index 1ff25ff..fbec009 100644
--- a/train_lora.py
+++ b/train_lora.py
@@ -157,12 +157,6 @@ def parse_args():
         default=[],
         help="Tokens to create an alias for.",
     )
-    parser.add_argument(
-        "--inverted_initializer_tokens",
-        type=str,
-        nargs="*",
-        help="A token to use as initializer word.",
-    )
     parser.add_argument(
         "--num_vectors", type=int, nargs="*", help="Number of vectors per embedding."
     )
@@ -633,18 +627,6 @@ def parse_args():
             "--placeholder_tokens and --initializer_tokens must have the same number of items"
         )
 
-    if isinstance(args.inverted_initializer_tokens, str):
-        args.inverted_initializer_tokens = [args.inverted_initializer_tokens] * len(
-            args.placeholder_tokens
-        )
-
-    if (
-        isinstance(args.inverted_initializer_tokens, list)
-        and len(args.inverted_initializer_tokens) != 0
-    ):
-        args.placeholder_tokens += [f"inv_{t}" for t in args.placeholder_tokens]
-        args.initializer_tokens += args.inverted_initializer_tokens
-
     if isinstance(args.num_vectors, int):
         args.num_vectors = [args.num_vectors] * len(args.placeholder_tokens)
 
--
cgit v1.2.3-54-g00ecf