Spaces:
Sleeping
Sleeping
Commit
·
f137e13
1
Parent(s):
02267af
update
Browse files
- app.py +6 -4
- requirements.txt +2 -1
app.py
CHANGED
@@ -122,7 +122,7 @@ pipe = TryonPipeline.from_pretrained(
|
|
122 |
pipe.unet_encoder = UNet_Encoder
|
123 |
|
124 |
@spaces.GPU
|
125 |
-
def start_tryon(dict,garm_img,garment_des,is_checked,is_checked_crop,denoise_steps,seed):
|
126 |
device = "cuda"
|
127 |
|
128 |
openpose_model.preprocessor.body_estimation.model.to(device)
|
@@ -150,7 +150,8 @@ def start_tryon(dict,garm_img,garment_des,is_checked,is_checked_crop,denoise_ste
|
|
150 |
if is_checked:
|
151 |
keypoints = openpose_model(human_img.resize((384,512)))
|
152 |
model_parse, _ = parsing_model(human_img.resize((384,512)))
|
153 |
-
mask, mask_gray = get_mask_location('hd', "upper_body", model_parse, keypoints)
|
|
|
154 |
mask = mask.resize((768,1024))
|
155 |
else:
|
156 |
mask = pil_to_binary_mask(dict['layers'][0].convert("RGB").resize((768, 1024)))
|
@@ -266,6 +267,7 @@ with image_blocks as demo:
|
|
266 |
with gr.Column():
|
267 |
imgs = gr.ImageEditor(sources='upload', type="pil", label='Human. Mask with pen or use auto-masking', interactive=True)
|
268 |
with gr.Row():
|
|
|
269 |
is_checked = gr.Checkbox(label="Yes", info="Use auto-generated mask (Takes 5 seconds)",value=True)
|
270 |
with gr.Row():
|
271 |
is_checked_crop = gr.Checkbox(label="Yes", info="Use auto-crop & resizing",value=False)
|
@@ -304,10 +306,10 @@ with image_blocks as demo:
|
|
304 |
|
305 |
|
306 |
|
307 |
-
try_button.click(fn=start_tryon, inputs=[imgs, garm_img, prompt, is_checked,is_checked_crop, denoise_steps, seed], outputs=[image_out,masked_img], api_name='tryon')
|
308 |
|
309 |
|
310 |
|
311 |
|
312 |
-
image_blocks.launch()
|
313 |
|
|
|
122 |
pipe.unet_encoder = UNet_Encoder
|
123 |
|
124 |
@spaces.GPU
|
125 |
+
def start_tryon(dict,garm_img,garment_des,category,is_checked,is_checked_crop,denoise_steps,seed):
|
126 |
device = "cuda"
|
127 |
|
128 |
openpose_model.preprocessor.body_estimation.model.to(device)
|
|
|
150 |
if is_checked:
|
151 |
keypoints = openpose_model(human_img.resize((384,512)))
|
152 |
model_parse, _ = parsing_model(human_img.resize((384,512)))
|
153 |
+
# mask, mask_gray = get_mask_location('hd', "upper_body", model_parse, keypoints)
|
154 |
+
mask, mask_gray = get_mask_location('hd', category, model_parse, keypoints)
|
155 |
mask = mask.resize((768,1024))
|
156 |
else:
|
157 |
mask = pil_to_binary_mask(dict['layers'][0].convert("RGB").resize((768, 1024)))
|
|
|
267 |
with gr.Column():
|
268 |
imgs = gr.ImageEditor(sources='upload', type="pil", label='Human. Mask with pen or use auto-masking', interactive=True)
|
269 |
with gr.Row():
|
270 |
+
category = gr.Radio(choices=["upper_body", "lower_body", "dresses"], label="Select Garment Category", value="upper_body")
|
271 |
is_checked = gr.Checkbox(label="Yes", info="Use auto-generated mask (Takes 5 seconds)",value=True)
|
272 |
with gr.Row():
|
273 |
is_checked_crop = gr.Checkbox(label="Yes", info="Use auto-crop & resizing",value=False)
|
|
|
306 |
|
307 |
|
308 |
|
309 |
+
try_button.click(fn=start_tryon, inputs=[imgs, garm_img, prompt, category, is_checked,is_checked_crop, denoise_steps, seed], outputs=[image_out,masked_img], api_name='tryon')
|
310 |
|
311 |
|
312 |
|
313 |
|
314 |
+
image_blocks.launch(server_port=3024, server_name="0.0.0.0")
|
315 |
|
requirements.txt
CHANGED
@@ -20,4 +20,5 @@ av
|
|
20 |
fvcore
|
21 |
cloudpickle
|
22 |
omegaconf
|
23 |
-
pycocotools
|
|
|
|
20 |
fvcore
|
21 |
cloudpickle
|
22 |
omegaconf
|
23 |
+
pycocotools
|
24 |
+
gradio
|