svjack committed (verified)
Commit: 79cbf7f
Parent(s): c62829f

Update app.py

Files changed (1):
  app.py (+4, -4)
app.py CHANGED
@@ -1,5 +1,5 @@
 import os
-import spaces
+#import spaces

 os.environ['HF_HOME'] = os.path.join(os.path.dirname(__file__), 'hf_download')
 HF_TOKEN = os.environ['hf_token'] if 'hf_token' in os.environ else None
@@ -108,7 +108,7 @@ def resize_without_crop(image, target_width, target_height):
     return np.array(resized_image)


-@spaces.GPU(duration=120)
+#@spaces.GPU(duration=120)
 @torch.inference_mode()
 def chat_fn(message: str, history: list, seed:int, temperature: float, top_p: float, max_new_tokens: int) -> str:
     print('Chat begin:', message)
@@ -185,7 +185,7 @@ def post_chat(history):
     return canvas_outputs, gr.update(visible=canvas_outputs is not None), gr.update(interactive=len(history) > 0)


-@spaces.GPU
+#@spaces.GPU
 @torch.inference_mode()
 def diffusion_fn(chatbot, canvas_outputs, num_samples, seed, image_width, image_height,
                  highres_scale, steps, cfg, highres_steps, highres_denoise, negative_prompt):
@@ -381,4 +381,4 @@ with gr.Blocks(
     ], outputs=[chatInterface.chatbot_state])

 if __name__ == "__main__":
-    demo.queue().launch()
+    demo.queue().launch(share = True)
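
The change is small: the `spaces` import and the `@spaces.GPU` decorators, which only apply when the app runs on Hugging Face ZeroGPU hardware, are commented out so the script can run on a plain local GPU, and `launch(share=True)` asks Gradio to expose the local server through a temporary public link. A minimal alternative sketch (not part of this commit) would keep the same file working in both environments by guarding the import and falling back to a no-op decorator; the `gpu` name below is introduced here purely for illustration:

import torch

# Sketch only: use the real ZeroGPU decorator when the `spaces` package is
# available (i.e. on a Hugging Face Space), otherwise run the function as-is.
try:
    import spaces
    gpu = spaces.GPU(duration=120)   # ZeroGPU decorator used in the original app.py
except ImportError:
    def gpu(fn):                     # local fallback: do nothing
        return fn

@gpu
@torch.inference_mode()
def chat_fn(message: str, history: list, seed: int, temperature: float,
            top_p: float, max_new_tokens: int) -> str:
    # function body unchanged from app.py
    ...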