Diffstat (limited to 'infer.py')
 infer.py | 16 ++++------------
 1 file changed, 4 insertions(+), 12 deletions(-)
diff --git a/infer.py b/infer.py
index 01010eb..ac05955 100644
--- a/infer.py
+++ b/infer.py
@@ -12,7 +12,7 @@ from diffusers import AutoencoderKL, UNet2DConditionModel, PNDMScheduler, DDIMSc
 from transformers import CLIPTextModel, CLIPTokenizer
 from slugify import slugify
 
-from schedulers.scheduling_euler_a import EulerAScheduler
+from schedulers.scheduling_euler_ancestral_discrete import EulerAncestralDiscreteScheduler
 from pipelines.stable_diffusion.vlpn_stable_diffusion import VlpnStableDiffusion
 
 
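The hunk above swaps the repo's vendored Euler-ancestral scheduler for a module named after the implementation that ships in upstream diffusers. A minimal sketch of the equivalent construction using stock diffusers (assuming a release recent enough to export EulerAncestralDiscreteScheduler; the vendored module in this repo may differ in detail):

    # Sketch: building the ancestral Euler scheduler from upstream
    # diffusers instead of a vendored copy. Assumes a diffusers release
    # that exports EulerAncestralDiscreteScheduler (0.7+).
    from diffusers import EulerAncestralDiscreteScheduler

    scheduler = EulerAncestralDiscreteScheduler(
        beta_start=0.00085, beta_end=0.012, beta_schedule="scaled_linear"
    )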
@@ -175,16 +175,8 @@ def load_embeddings_ti(tokenizer, text_encoder, embeddings_dir):
     embeddings_dir = Path(embeddings_dir)
     embeddings_dir.mkdir(parents=True, exist_ok=True)
 
-    for file in embeddings_dir.iterdir():
-        if file.is_file():
-            placeholder_token = file.stem
-
-            num_added_tokens = tokenizer.add_tokens(placeholder_token)
-            if num_added_tokens == 0:
-                raise ValueError(
-                    f"The tokenizer already contains the token {placeholder_token}. Please pass a different"
-                    " `placeholder_token` that is not already in the tokenizer."
-                )
+    placeholder_tokens = [file.stem for file in embeddings_dir.iterdir() if file.is_file()]
+    tokenizer.add_tokens(placeholder_tokens)
 
     text_encoder.resize_token_embeddings(len(tokenizer))
 
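The simplification in this hunk leans on tokenizer.add_tokens accepting a list: transformers skips tokens that are already in the vocabulary (and duplicates within the list) instead of raising, which is what made the per-token ValueError redundant. A small illustration of that semantics, with hypothetical token names:

    # Illustration (hypothetical tokens): add_tokens() returns only the
    # count of tokens that were actually new; repeats are skipped silently.
    from transformers import CLIPTokenizer

    tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32")
    added = tokenizer.add_tokens(["<token-a>", "<token-a>", "<token-b>"])
    print(added)                                # 2 -- the duplicate is ignored
    print(tokenizer.add_tokens(["<token-a>"]))  # 0 -- already present, no error

One behavioral consequence: a zero return is now silent, so an embedding file whose stem collides with an existing token is no longer reported.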
@@ -231,7 +223,7 @@ def create_pipeline(model, scheduler, ti_embeddings_dir, dtype):
             beta_start=0.00085, beta_end=0.012, beta_schedule="scaled_linear", clip_sample=False, set_alpha_to_one=False
         )
     else:
-        scheduler = EulerAScheduler(
+        scheduler = EulerAncestralDiscreteScheduler(
             beta_start=0.00085, beta_end=0.012, beta_schedule="scaled_linear"
         )
 
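For context, the scheduler selection in create_pipeline presumably reads roughly as follows after this change (a sketch: only the else branch appears in the hunk, and the branch condition plus the DDIM arm are inferred from the surrounding context lines):

    # Sketch of the surrounding branch (the condition name is an
    # assumption; the DDIM arguments come from the hunk's context lines).
    if scheduler == "ddim":
        scheduler = DDIMScheduler(
            beta_start=0.00085, beta_end=0.012, beta_schedule="scaled_linear",
            clip_sample=False, set_alpha_to_one=False
        )
    else:
        scheduler = EulerAncestralDiscreteScheduler(
            beta_start=0.00085, beta_end=0.012, beta_schedule="scaled_linear"
        )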