From e454b7e7df13cf6ce7b96b7dcc107533edf83f6f Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Thu, 12 Jan 2023 08:51:17 +0100
Subject: Fixed TI decay

---
 models/clip/embeddings.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

(limited to 'models/clip/embeddings.py')

diff --git a/models/clip/embeddings.py b/models/clip/embeddings.py
index 9d8f770..46b414b 100644
--- a/models/clip/embeddings.py
+++ b/models/clip/embeddings.py
@@ -3,6 +3,7 @@ from pathlib import Path
 
 import torch
 import torch.nn as nn
+import torch.nn.functional as F
 
 from safetensors import safe_open
 from safetensors.torch import save_file
@@ -45,7 +46,7 @@ class ManagedCLIPTextEmbeddings(CLIPTextEmbeddings):
             device=self.token_embedding.weight.device,
             dtype=self.token_embedding.weight.dtype
         )
-        self.temp_token_embedding.weight.data.normal_(mean=0.0, std=self.initializer_factor * 0.02)
+        self.temp_token_embedding.weight.data = self.token_embedding.weight.data.clone().detach()
         self.temp_token_ids = torch.tensor([], dtype=torch.long)
 
     def resize(self, size: int):
@@ -98,6 +99,13 @@ class ManagedCLIPTextEmbeddings(CLIPTextEmbeddings):
 
         return embeds
 
+    def normalize(self, lambda_: float = 1.0):
+        w = self.temp_token_embedding.weight
+        pre_norm = w[self.temp_token_ids, :].norm(dim=-1, keepdim=True)
+        w[self.temp_token_ids] = F.normalize(
+            w[self.temp_token_ids, :], dim=-1
+        ) * (pre_norm + lambda_ * (0.4 - pre_norm))
+
     def forward(
         self,
         input_ids: Optional[torch.LongTensor] = None,
--
cgit v1.2.3-70-g09d2
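
A minimal standalone sketch of the decay rule the new normalize() method applies (not part of the patch; emb, ids, and the free-standing normalize helper below are hypothetical stand-ins for temp_token_embedding, temp_token_ids, and the method itself). Each call rescales the selected embedding rows so their norm moves from pre_norm toward the target 0.4 by a fraction lambda_, i.e. new_norm = pre_norm + lambda_ * (0.4 - pre_norm); lambda_ = 1.0 snaps the norm to 0.4, smaller values decay it gradually:

    import torch
    import torch.nn.functional as F

    def normalize(w: torch.Tensor, ids: torch.Tensor,
                  lambda_: float = 1.0, target: float = 0.4):
        # Norm of each selected row before rescaling.
        pre_norm = w[ids, :].norm(dim=-1, keepdim=True)
        # Unit-normalize the rows, then rescale to a norm interpolated
        # between pre_norm and target by the factor lambda_.
        w[ids] = F.normalize(w[ids, :], dim=-1) * (pre_norm + lambda_ * (target - pre_norm))

    emb = torch.nn.Embedding(10, 8)               # stand-in for temp_token_embedding
    ids = torch.tensor([3, 7], dtype=torch.long)  # stand-in for temp_token_ids

    with torch.no_grad():
        for step in range(3):
            normalize(emb.weight.data, ids, lambda_=0.5)
            print(emb.weight.data[ids].norm(dim=-1))  # norms converge toward 0.4

The hard-coded 0.4 target presumably keeps the trained embeddings near the scale of the pretrained token embeddings, consistent with the init change above, which copies the pretrained weights instead of re-sampling them from a normal distribution.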