From e454b7e7df13cf6ce7b96b7dcc107533edf83f6f Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Thu, 12 Jan 2023 08:51:17 +0100
Subject: Fixed TI decay

---
 train_ti.py | 11 +++--------
 1 file changed, 3 insertions(+), 8 deletions(-)

diff --git a/train_ti.py b/train_ti.py
index 2c5037f..890c465 100644
--- a/train_ti.py
+++ b/train_ti.py
@@ -7,7 +7,6 @@ from pathlib import Path
 from contextlib import contextmanager, nullcontext
 
 import torch
-import torch.nn.functional as F
 import torch.utils.checkpoint
 
 from accelerate import Accelerator
@@ -166,7 +165,7 @@ def parse_args():
     parser.add_argument(
         "--tag_dropout",
         type=float,
-        default=0.1,
+        default=0,
         help="Tag dropout probability.",
     )
     parser.add_argument(
@@ -177,7 +176,7 @@
     parser.add_argument(
         "--vector_dropout",
         type=int,
-        default=0.1,
+        default=0,
         help="Vector dropout probability.",
     )
     parser.add_argument(
@@ -869,11 +868,7 @@ def main():
 
     @torch.no_grad()
     def on_clip(lr):
-        embeddings = text_encoder.text_model.embeddings.temp_token_embedding
-
-        pre_norm = embeddings.weight.norm(dim=-1, keepdim=True)
-        lambda_ = min(1.0, 100 * lr)
-        embeddings.weight[:] = F.normalize(embeddings.weight, dim=-1) * (pre_norm + lambda_ * (0.4 - pre_norm))
+        text_encoder.text_model.embeddings.normalize(min(1.0, 100 * lr))
 
     loop = partial(
         loss_step,
-- 
cgit v1.2.3-54-g00ecf
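
Note on the on_clip() change: the deleted lines decayed the L2 norm of every
vector in temp_token_embedding toward a target of 0.4, with a strength scaled
by the learning rate; the commit moves that logic behind the embeddings'
normalize() method. Below is a minimal standalone sketch of the decay step,
assuming the same target norm (0.4) and lr scaling as the deleted code; the
function name normalize_embeddings and the example shapes are hypothetical,
not part of the commit.

import torch
import torch.nn.functional as F

def normalize_embeddings(weight: torch.Tensor, lr: float, target: float = 0.4) -> None:
    # Decay each embedding vector's L2 norm toward `target`, mirroring the
    # logic the commit removed from on_clip(). The interpolation factor
    # lambda_ grows with the learning rate and is capped at 1.0.
    with torch.no_grad():
        pre_norm = weight.norm(dim=-1, keepdim=True)  # per-vector norms, shape (n, 1)
        lambda_ = min(1.0, 100 * lr)                  # lr-scaled decay strength
        # Rescale the unit vectors to a norm interpolated between pre_norm and target.
        weight[:] = F.normalize(weight, dim=-1) * (pre_norm + lambda_ * (target - pre_norm))

# Example (hypothetical shapes): nudge 8 random 768-dim vectors toward norm 0.4.
weight = torch.randn(8, 768)
normalize_embeddings(weight, lr=1e-4)
print(weight.norm(dim=-1))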