author     Volpeon <git@volpeon.ink>  2022-11-14 17:09:58 +0100
committer  Volpeon <git@volpeon.ink>  2022-11-14 17:09:58 +0100
commit     2ad46871e2ead985445da2848a4eb7072b6e48aa (patch)
tree       3137923e2c00fe1d3cd37ddcc93c8a847b0c0762 /training
parent     Update (diff)
Update
Diffstat (limited to 'training')
 training/optimization.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/training/optimization.py b/training/optimization.py
index 0fd7ec8..0e603fa 100644
--- a/training/optimization.py
+++ b/training/optimization.py
@@ -6,7 +6,7 @@ from diffusers.utils import logging
 logger = logging.get_logger(__name__)
 
 
-def get_one_cycle_schedule(optimizer, num_training_steps, annealing="cos", min_lr=0.05, mid_point=0.43, last_epoch=-1):
+def get_one_cycle_schedule(optimizer, num_training_steps, annealing="cos", min_lr=0.05, mid_point=0.4, last_epoch=-1):
     """
     Create a schedule with a learning rate that decreases linearly from the initial lr set in the optimizer to 0, after
     a warmup period during which it increases linearly from 0 to the initial lr set in the optimizer.
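The change only lowers the default mid_point of the one-cycle schedule from 0.43 to 0.4, so the learning-rate peak is reached slightly earlier in training. The function body itself is not part of this diff; as context, here is a minimal sketch of how a one-cycle schedule with mid_point, min_lr, and annealing parameters could be built on PyTorch's LambdaLR. The exact warmup and annealing shapes below are assumptions for illustration, not the repository's implementation.

import math

from torch.optim.lr_scheduler import LambdaLR


def one_cycle_sketch(optimizer, num_training_steps, annealing="cos",
                     min_lr=0.05, mid_point=0.4, last_epoch=-1):
    # Hypothetical sketch: not the actual get_one_cycle_schedule body.
    # Step at which warmup ends and annealing begins (the "mid point").
    peak_step = int(num_training_steps * mid_point)

    def lr_lambda(current_step):
        if current_step < peak_step:
            # Warmup: scale the base lr linearly from 0 up to 1.
            return current_step / max(1, peak_step)
        # Annealing: scale back down from 1 towards min_lr.
        progress = (current_step - peak_step) / max(1, num_training_steps - peak_step)
        if annealing == "cos":
            return min_lr + (1.0 - min_lr) * 0.5 * (1.0 + math.cos(math.pi * progress))
        return max(min_lr, 1.0 - (1.0 - min_lr) * progress)

    return LambdaLR(optimizer, lr_lambda, last_epoch)

Under this sketch, with mid_point=0.4 and num_training_steps=1000, the multiplier would peak at step 400 and then decay towards min_lr over the remaining 600 steps, whereas the previous default of 0.43 would have peaked at step 430.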