From 186a69104530610f8c2b924f79a04f941e5238c8 Mon Sep 17 00:00:00 2001
From: Volpeon
Date: Thu, 22 Jun 2023 07:33:29 +0200
Subject: Remove convnext

---
 training/functional.py | 13 -------------
 1 file changed, 13 deletions(-)

diff --git a/training/functional.py b/training/functional.py
index 3c7848f..a3d1f08 100644
--- a/training/functional.py
+++ b/training/functional.py
@@ -29,11 +29,8 @@ from pipelines.stable_diffusion.vlpn_stable_diffusion import VlpnStableDiffusion
 from models.clip.embeddings import ManagedCLIPTextEmbeddings
 from models.clip.util import get_extended_embeddings
 from models.clip.tokenizer import MultiCLIPTokenizer
-from models.convnext.discriminator import ConvNeXtDiscriminator
 from training.util import AverageMeter
 from training.sampler import ScheduleSampler, LossAwareSampler, UniformSampler
-from util.slerp import slerp
-from util.noise import perlin_noise
 
 
 def const(result=None):
@@ -349,7 +346,6 @@ def loss_step(
     prior_loss_weight: float,
     seed: int,
     input_pertubation: float,
-    disc: Optional[ConvNeXtDiscriminator],
     min_snr_gamma: int,
     step: int,
     batch: dict[str, Any],
@@ -449,13 +445,6 @@ def loss_step(
 
         loss = loss.mean([1, 2, 3])
 
-    if disc is not None:
-        rec_latent = get_original(noise_scheduler, model_pred, noisy_latents, timesteps)
-        rec_latent = rec_latent / vae.config.scaling_factor
-        rec_latent = rec_latent.to(dtype=vae.dtype)
-        rec = vae.decode(rec_latent, return_dict=False)[0]
-        loss = 1 - disc.get_score(rec)
-
     if min_snr_gamma != 0:
         snr = compute_snr(timesteps, noise_scheduler)
         mse_loss_weights = (
@@ -741,7 +730,6 @@ def train(
     guidance_scale: float = 0.0,
     prior_loss_weight: float = 1.0,
     input_pertubation: float = 0.1,
-    disc: Optional[ConvNeXtDiscriminator] = None,
     schedule_sampler: Optional[ScheduleSampler] = None,
     min_snr_gamma: int = 5,
     avg_loss: AverageMeter = AverageMeter(),
@@ -803,7 +791,6 @@ def train(
         prior_loss_weight,
         seed,
         input_pertubation,
-        disc,
         min_snr_gamma,
     )
 
-- 
cgit v1.2.3-54-g00ecf