prithivMLmods committed on
Commit
4f589b5
·
verified ·
1 Parent(s): d37e3c2
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -212,7 +212,7 @@ def adjust_generation_mode(speed_mode):
212
  else:
213
  return gr.update(value="Base mode selected - 48 steps for best quality"), 48, 4.0
214
 
215
- @spaces.GPU(duration=108)
216
  def create_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, negative_prompt=""):
217
  pipe.to("cuda")
218
  generator = torch.Generator(device="cuda").manual_seed(seed)
@@ -231,7 +231,7 @@ def create_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale,
231
 
232
  return image
233
 
234
- @spaces.GPU(duration=108)
235
  def process_adapter_generation(prompt, cfg_scale, steps, selected_index, randomize_seed, seed, aspect_ratio, lora_scale, speed_mode, progress=gr.Progress(track_tqdm=True)):
236
  if selected_index is None:
237
  raise gr.Error("You must select a LoRA before proceeding.")
@@ -540,5 +540,5 @@ with gr.Blocks(theme="bethecloud/storj_theme", css=css, delete_cache=(120, 120))
540
  outputs=[result, seed]
541
  )
542
 
543
- app.queue()
544
  app.launch(share=False, ssr_mode=False, show_error=True)
 
212
  else:
213
  return gr.update(value="Base mode selected - 48 steps for best quality"), 48, 4.0
214
 
215
+ @spaces.GPU(duration=100)
216
  def create_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, negative_prompt=""):
217
  pipe.to("cuda")
218
  generator = torch.Generator(device="cuda").manual_seed(seed)
 
231
 
232
  return image
233
 
234
+ @spaces.GPU(duration=100)
235
  def process_adapter_generation(prompt, cfg_scale, steps, selected_index, randomize_seed, seed, aspect_ratio, lora_scale, speed_mode, progress=gr.Progress(track_tqdm=True)):
236
  if selected_index is None:
237
  raise gr.Error("You must select a LoRA before proceeding.")
 
540
  outputs=[result, seed]
541
  )
542
 
543
+ app.queue(max_size=50)
544
  app.launch(share=False, ssr_mode=False, show_error=True)