diff options
author | Volpeon <git@volpeon.ink> | 2023-04-08 17:38:49 +0200 |
---|---|---|
committer | Volpeon <git@volpeon.ink> | 2023-04-08 17:38:49 +0200 |
commit | 9f5f70cb2a8919cb07821f264bf0fd75bfa10584 (patch) | |
tree | 19bd8802b6cfd941797beabfc0bb2595ffb00b5f /training/strategy/lora.py | |
parent | Fix TI (diff) | |
download | textual-inversion-diff-9f5f70cb2a8919cb07821f264bf0fd75bfa10584.tar.gz textual-inversion-diff-9f5f70cb2a8919cb07821f264bf0fd75bfa10584.tar.bz2 textual-inversion-diff-9f5f70cb2a8919cb07821f264bf0fd75bfa10584.zip |
Update
Diffstat (limited to 'training/strategy/lora.py')
-rw-r--r-- | training/strategy/lora.py | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/training/strategy/lora.py b/training/strategy/lora.py index cfdc504..ae85401 100644 --- a/training/strategy/lora.py +++ b/training/strategy/lora.py | |||
@@ -93,7 +93,7 @@ def lora_strategy_callbacks( | |||
93 | if use_emb_decay: | 93 | if use_emb_decay: |
94 | params = [ | 94 | params = [ |
95 | p | 95 | p |
96 | for p in text_encoder.text_model.embeddings.token_override_embedding.params | 96 | for p in text_encoder.text_model.embeddings.token_override_embedding.parameters() |
97 | if p.grad is not None | 97 | if p.grad is not None |
98 | ] | 98 | ] |
99 | return torch.stack(params) if len(params) != 0 else None | 99 | return torch.stack(params) if len(params) != 0 else None |