Ashoka74 committed on
Commit 429c426 · verified · 1 Parent(s): 71830c5

Update bg_input_test.py

Files changed (1):
  1. bg_input_test.py +7 -6
bg_input_test.py CHANGED
@@ -1,3 +1,4 @@
+import spaces
 import os
 import math
 import gradio as gr
@@ -128,7 +129,7 @@ i2i_pipe = StableDiffusionImg2ImgPipeline(
     image_encoder=None
 )

-
+@spaces.GPU(duration=59)
 @torch.inference_mode()
 def encode_prompt_inner(txt: str):
     max_length = tokenizer.model_max_length
@@ -149,7 +150,7 @@ def encode_prompt_inner(txt: str):

     return conds

-
+@spaces.GPU(duration=59)
 @torch.inference_mode()
 def encode_prompt_pair(positive_prompt, negative_prompt):
     c = encode_prompt_inner(positive_prompt)
@@ -170,7 +171,7 @@ def encode_prompt_pair(positive_prompt, negative_prompt):

     return c, uc

-
+@spaces.GPU(duration=59)
 @torch.inference_mode()
 def pytorch2numpy(imgs, quant=True):
     results = []
@@ -187,7 +188,7 @@ def pytorch2numpy(imgs, quant=True):
         results.append(y)
     return results

-
+@spaces.GPU(duration=59)
 @torch.inference_mode()
 def numpy2pytorch(imgs):
     h = torch.from_numpy(np.stack(imgs, axis=0)).float() / 127.0 - 1.0  # so that 127 must be strictly 0.0
@@ -215,7 +216,7 @@ def resize_without_crop(image, target_width, target_height):
     resized_image = pil_image.resize((target_width, target_height), Image.LANCZOS)
     return np.array(resized_image)

-
+@spaces.GPU(duration=59)
 @torch.inference_mode()
 def run_rmbg(img, sigma=0.0):
     H, W, C = img.shape
@@ -335,7 +336,7 @@ def process(input_fg, prompt, image_width, image_height, num_samples, seed, step

     return pytorch2numpy(pixels)

-
+@spaces.GPU(duration=59)
 @torch.inference_mode()
 def process_relight(input_fg, prompt, image_width, image_height, num_samples, seed, steps, a_prompt, n_prompt, cfg, highres_scale, highres_denoise, lowres_denoise, bg_source):
     input_fg, matting = run_rmbg(input_fg)
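
For reference, `spaces.GPU` is the Hugging Face ZeroGPU decorator: it requests a GPU allocation for each call to the decorated function, released after the call (here capped at 59 seconds per call). Below is a minimal sketch of the pattern the diff applies; `demo_infer` is an illustrative name, not a function from bg_input_test.py.

# Minimal sketch of the ZeroGPU pattern used above.
# `demo_infer` is hypothetical; only the decorators mirror the actual change.
import spaces
import torch

@spaces.GPU(duration=59)   # request a ZeroGPU allocation for up to 59 s per call
@torch.inference_mode()    # run without autograd bookkeeping
def demo_infer(x: torch.Tensor) -> torch.Tensor:
    # CUDA is available inside the decorated call on a ZeroGPU Space
    return (x.to("cuda") * 2).cpu()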