From 347ad308f8223d966793f0421c72432f7e912377 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Wed, 8 Feb 2023 11:38:56 +0100
Subject: Integrate Self-Attention-Guided (SAG) Stable Diffusion in my custom
 pipeline

---
 train_lora.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/train_lora.py b/train_lora.py
index ab1753b..5fd05cc 100644
--- a/train_lora.py
+++ b/train_lora.py
@@ -177,11 +177,6 @@ def parse_args():
         default=1,
         help="Number of updates steps to accumulate before performing a backward/update pass.",
     )
-    parser.add_argument(
-        "--gradient_checkpointing",
-        action="store_true",
-        help="Whether or not to use gradient checkpointing to save memory at the expense of slower backward pass.",
-    )
     parser.add_argument(
         "--find_lr",
         action="store_true",
@@ -429,9 +424,6 @@ def main():
     vae.set_use_memory_efficient_attention_xformers(True)
     unet.enable_xformers_memory_efficient_attention()
 
-    if args.gradient_checkpointing:
-        unet.enable_gradient_checkpointing()
-
     unet.to(accelerator.device, dtype=weight_dtype)
     text_encoder.to(accelerator.device, dtype=weight_dtype)
 
-- 
cgit v1.2.3-54-g00ecf
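
The patch above removes the --gradient_checkpointing flag from train_lora.py
as part of integrating SAG into the author's custom pipeline; the custom
pipeline itself is not included in this commit. As a point of reference for
the technique named in the subject line, below is a minimal sketch of SAG
inference using diffusers' built-in StableDiffusionSAGPipeline. This is not
the author's pipeline, and the checkpoint ID, prompt, and sag_scale value are
illustrative assumptions rather than settings taken from this repository.

    import torch
    from diffusers import StableDiffusionSAGPipeline

    # Load a Stable Diffusion 1.x checkpoint; this model ID is an assumption,
    # not the one used by the author's training script.
    pipe = StableDiffusionSAGPipeline.from_pretrained(
        "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
    ).to("cuda")

    # sag_scale controls the strength of self-attention guidance applied on
    # top of ordinary classifier-free guidance; sag_scale=0.0 disables SAG.
    image = pipe(
        "a photo of an astronaut riding a horse",  # example prompt, assumed
        guidance_scale=7.5,
        sag_scale=0.75,
    ).images[0]
    image.save("sag_sample.png")

Gradient checkpointing (the feature removed here) is orthogonal to SAG: it
re-computes activations during the backward pass to save memory at the cost
of extra compute, and in diffusers it is toggled per-model with
unet.enable_gradient_checkpointing(), as seen in the deleted lines.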