Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -119,8 +119,8 @@ mask_adapter = None
 @torch.no_grad()
 @torch.autocast(device_type="cuda", dtype=torch.float32)
 def inference_box(input_img, img_state,):
-
-
+    if len(img_state.selected_bboxes) != 2:
+        return None
     mp.set_start_method("spawn", force=True)
 
     box_points = img_state.selected_bboxes
@@ -254,10 +254,9 @@ examples = [
 
 examples_point = [
     ['./demo/images/ADE_val_00000739.jpg'],
-    ['./demo/images/
-    ['./demo/images/
+    ['./demo/images/000000290833.jpg'],
+    ['./demo/images/2010_001315.jpg'],
     ['./demo/images/ADE_val_00000001.jpg'],
-    ['./demo/images/000000033707.jpg'],
     ['./demo/images/000000000785.jpg'],
 ]
 
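For readers of the diff: the first hunk adds an early-return guard so that inference_box does nothing until exactly two boxes have been selected, and the second hunk replaces three removed entries (two of them truncated in this view, plus 000000033707.jpg) with 000000290833.jpg and 2010_001315.jpg in the examples_point list. Below is a minimal, self-contained sketch of the guard pattern only; ImgState and the returned placeholder dict are hypothetical stand-ins for the app's real image-state object and model output, not code taken from app.py.

from dataclasses import dataclass, field

@dataclass
class ImgState:
    # Hypothetical stand-in for the app's image-state object; the real one
    # is defined elsewhere in app.py.
    selected_bboxes: list = field(default_factory=list)

def inference_box(input_img, img_state):
    # Early-return guard mirroring the added lines: bail out unless exactly
    # two box selections are present, which is the count the app expects.
    if len(img_state.selected_bboxes) != 2:
        return None
    box_points = img_state.selected_bboxes
    # ... the real function would run the segmentation model here ...
    return {"boxes": box_points}  # placeholder result for this sketch

print(inference_box(None, ImgState()))                                 # -> None
print(inference_box(None, ImgState(selected_bboxes=[[0, 0, 5, 5],
                                                    [6, 6, 9, 9]])))   # -> runs

Returning None instead of raising keeps the click handler from erroring out when the user has not finished selecting boxes, which appears to be the motivation for the guard. The updated examples_point list is presumably wired into the demo's point-prompt examples (for instance via a gr.Examples component or an examples= argument); the diff only changes which demo images appear there.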