summaryrefslogtreecommitdiffstats
path: root/models/clip
diff options
context:
space:
mode:
authorVolpeon <git@volpeon.ink>2023-04-15 13:31:24 +0200
committerVolpeon <git@volpeon.ink>2023-04-15 13:31:24 +0200
commitd488f66c78e444d03c4ef8a957b82f8b239379d0 (patch)
tree864b2fe8d03b0cdfc3437622a0dcd5a1ede60e16 /models/clip
parentTI via LoRA (diff)
downloadtextual-inversion-diff-d488f66c78e444d03c4ef8a957b82f8b239379d0.tar.gz
textual-inversion-diff-d488f66c78e444d03c4ef8a957b82f8b239379d0.tar.bz2
textual-inversion-diff-d488f66c78e444d03c4ef8a957b82f8b239379d0.zip
Fix
Diffstat (limited to 'models/clip')
-rw-r--r--models/clip/embeddings.py2
1 file changed, 1 insertion, 1 deletion
diff --git a/models/clip/embeddings.py b/models/clip/embeddings.py
index 60c1b20..840f8ae 100644
--- a/models/clip/embeddings.py
+++ b/models/clip/embeddings.py
@@ -2,7 +2,6 @@ from typing import Union, Optional
2from pathlib import Path 2from pathlib import Path
3 3
4import torch 4import torch
5import torch.nn as nn
6 5
7from safetensors import safe_open 6from safetensors import safe_open
8from safetensors.torch import save_file 7from safetensors.torch import save_file
@@ -64,6 +63,7 @@ class ManagedCLIPTextEmbeddings(CLIPTextEmbeddings):
64 63
65 token_ids = torch.tensor(token_ids, dtype=torch.long) 64 token_ids = torch.tensor(token_ids, dtype=torch.long)
66 65
66 self.token_embedding.mark_trainable(token_ids)
67 self.token_embedding.weight.data[token_ids] = initializer 67 self.token_embedding.weight.data[token_ids] = initializer
68 68
69 def load_embed(self, input_ids: list[int], filename: Path): 69 def load_embed(self, input_ids: list[int], filename: Path):