import torch


def set_use_memory_efficient_attention_xformers(module: torch.nn.Module, valid: bool) -> None:
    """Recursively enable or disable xformers memory-efficient attention.

    Walks *module* and every descendant submodule; any module that exposes a
    ``set_use_memory_efficient_attention_xformers`` method has it invoked
    with *valid*.

    Args:
        module: Root module of the tree to update.
        valid: ``True`` to enable xformers attention, ``False`` to disable.
    """
    # Toggle the module itself first, if it supports the feature.
    setter = getattr(module, "set_use_memory_efficient_attention_xformers", None)
    if setter is not None:
        setter(valid)
    # Then descend into every direct child; recursion covers the whole tree.
    for submodule in module.children():
        set_use_memory_efficient_attention_xformers(submodule, valid)