From d960ad1c3c2625dfb3bbb112d054b3867d215c78 Mon Sep 17 00:00:00 2001
From: Pedro Cuenca
Date: Mon, 10 Jul 2023 12:36:05 +0200
Subject: [PATCH] Remove remaining `not` in upscale pipeline.

---
 .../stable_diffusion/pipeline_stable_diffusion_upscale.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_upscale.py b/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_upscale.py
index 68d13014c388..a7255424fb46 100644
--- a/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_upscale.py
+++ b/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_upscale.py
@@ -760,7 +760,7 @@ def __call__(
         # if xformers or torch_2_0 is used attention block does not need
         # to be in float32 which can save lots of memory
-        if not use_torch_2_0_or_xformers:
+        if use_torch_2_0_or_xformers:
             self.vae.post_quant_conv.to(latents.dtype)
             self.vae.decoder.conv_in.to(latents.dtype)
             self.vae.decoder.mid_block.to(latents.dtype)
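
For context, below is a minimal, standalone sketch of the logic the patch corrects. It uses a randomly initialized AutoencoderKL in place of the pipeline's `self.vae` and a placeholder `latents` tensor; the real pipeline also checks the LoRA processor variants, which are omitted here for brevity.

import torch
from diffusers import AutoencoderKL
from diffusers.models.attention_processor import AttnProcessor2_0, XFormersAttnProcessor

# Small randomly initialized VAE standing in for the upscale pipeline's `self.vae`.
vae = AutoencoderKL()
latents = torch.randn(1, 4, 32, 32, dtype=torch.float16)  # placeholder latents

# The pipeline upcasts the whole VAE to float32 before decoding to avoid fp16 overflow.
vae.to(dtype=torch.float32)

# Same kind of check the pipeline performs: is a memory-efficient attention
# processor (torch 2.0 SDPA or xformers) active on the decoder's mid block?
use_torch_2_0_or_xformers = isinstance(
    vae.decoder.mid_block.attentions[0].processor,
    (AttnProcessor2_0, XFormersAttnProcessor),
)

# The fix: when SDPA/xformers *is* in use, only the attention block needs to stay
# in float32, so the surrounding decoder submodules can be cast back to the
# latents dtype (float16 here) to save memory. The removed `not` inverted this
# condition and kept everything in float32 exactly when the cast was safe.
if use_torch_2_0_or_xformers:
    vae.post_quant_conv.to(latents.dtype)
    vae.decoder.conv_in.to(latents.dtype)
    vae.decoder.mid_block.to(latents.dtype)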