
vilarin committed (verified)
Commit 52c882d · 1 Parent(s): 36d0a3b

Update app.py

Files changed (1):
  app.py  (+2, -2)
app.py CHANGED
@@ -109,7 +109,6 @@ class ModelWrapper:
 
 
     @torch.no_grad()
-    @spaces.GPU()
     def inference(self, prompt, seed, height, width, num_images, fast_vae_decode):
         print("Running model inference...")
 
@@ -148,6 +147,7 @@ class ModelWrapper:
 
         return output_image_list, f"Run successfully in {(end_time-start_time):.2f} seconds"
 
+
 def get_x0_from_noise(sample, model_output, alphas_cumprod, timestep):
     alpha_prod_t = alphas_cumprod[timestep].reshape(-1, 1, 1, 1)
     beta_prod_t = 1 - alpha_prod_t
@@ -184,7 +184,7 @@ class SDXLTextEncoder(torch.nn.Module):
 
         return prompt_embeds, pooled_prompt_embeds
 
-
+@spaces.GPU(duration=100)
 def create_demo():
     TITLE = "# DMD2-SDXL Demo"
     model_id = "stabilityai/stable-diffusion-xl-base-1.0"
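For context, the spaces.GPU decorator comes from the Hugging Face `spaces` package used on ZeroGPU Spaces: a GPU is attached only while a decorated function runs, and the `duration` argument requests a longer slot (in seconds) than the default. The sketch below shows the usual pattern only; the `generate` function and pipeline setup are illustrative and not taken from this Space's app.py.

import spaces
import torch
from diffusers import DiffusionPipeline

# Load the model once at startup; weights can be moved to CUDA here on ZeroGPU.
pipe = DiffusionPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
)
pipe.to("cuda")

@spaces.GPU(duration=100)  # request a GPU slot of up to ~100 seconds per call
def generate(prompt):
    # GPU work runs inside the decorated function (illustrative example)
    return pipe(prompt).images[0]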