Spaces: vilarin / Running on Zero

vilarin committed · Commit f38eadf · verified · 1 Parent(s): 8dd5bc6

Update app.py

Files changed (1): app.py +2 -1
app.py CHANGED
@@ -94,6 +94,7 @@ class ModelWrapper:
         for constant in all_timesteps:
             current_timesteps = torch.ones(len(prompt_embed), device="cuda", dtype=torch.long) * constant
             eval_images = self.model(noise, current_timesteps, prompt_embed, added_cond_kwargs=unet_added_conditions).sample
+            print(type(eval_images))

             eval_images = get_x0_from_noise(noise, eval_images, alphas_cumprod, current_timesteps).to(self.DTYPE)

@@ -138,7 +139,7 @@ class ModelWrapper:
         }

         eval_images = self.sample(noise=noise, unet_added_conditions=unet_added_conditions, prompt_embed=batch_prompt_embeds, fast_vae_decode=fast_vae_decode)
-
+        print(type(eval_images))
         end_time = self._get_time()

         output_image_list = []
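
Both added lines are debug prints of eval_images: first the raw UNet output, then the result of self.sample. For context, below is a minimal standalone sketch of what a get_x0_from_noise helper typically computes under the standard DDPM epsilon-parameterization. The argument order mirrors the call in the first hunk, but the function body, tensor shapes, and toy alphas_cumprod schedule are assumptions for illustration, not taken from this Space's actual code.

import torch

def get_x0_from_noise(sample, model_output, alphas_cumprod, timesteps):
    # Assumed standard DDPM x0 recovery: x0 = (x_t - sqrt(1 - a_bar_t) * eps) / sqrt(a_bar_t).
    # The Space's own helper may differ; argument order follows the call in the diff above.
    alpha_bar_t = alphas_cumprod[timesteps].view(-1, 1, 1, 1)
    return (sample - torch.sqrt(1.0 - alpha_bar_t) * model_output) / torch.sqrt(alpha_bar_t)

# Tiny CPU-only usage example with random tensors (shapes are illustrative only),
# ending with the same kind of type check the commit adds.
alphas_cumprod = torch.linspace(0.9999, 0.01, 1000)
noise = torch.randn(2, 4, 8, 8)
model_output = torch.randn(2, 4, 8, 8)
current_timesteps = torch.full((2,), 999, dtype=torch.long)
eval_images = get_x0_from_noise(noise, model_output, alphas_cumprod, current_timesteps)
print(type(eval_images), eval_images.shape)  # <class 'torch.Tensor'> torch.Size([2, 4, 8, 8])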