Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,9 +1,7 @@
|
|
1 |
#!/usr/bin/env python
|
2 |
|
3 |
from __future__ import annotations
|
4 |
-
|
5 |
import pathlib
|
6 |
-
|
7 |
import cv2
|
8 |
import gradio as gr
|
9 |
import huggingface_hub
|
@@ -43,34 +41,24 @@ def detect_person(
|
|
43 |
return bboxes, vbboxes
|
44 |
|
45 |
|
46 |
-
def
|
47 |
-
|
48 |
-
for
|
49 |
-
bbox = bboxes[i]
|
50 |
-
vbbox = vbboxes[i]
|
51 |
x1, y1, x2, y2 = bbox
|
52 |
-
|
53 |
-
|
54 |
-
|
55 |
-
color = (255, 0, 0)
|
56 |
-
for c in range(3):
|
57 |
-
res[vy1:vy2, vx1:vx2, c] = res[vy1:vy2, vx1:vx2, c] * alpha + color[c] * (1.0 - alpha)
|
58 |
-
cv2.circle(res, (vx1, vy1), 1, color, 2)
|
59 |
-
cv2.circle(res, (vx1, vy2), 1, color, 2)
|
60 |
-
cv2.circle(res, (vx2, vy1), 1, color, 2)
|
61 |
-
cv2.circle(res, (vx2, vy2), 1, color, 2)
|
62 |
-
return res
|
63 |
|
64 |
|
65 |
detector = load_model()
|
66 |
detector.prepare(-1, nms_thresh=0.5, input_size=(640, 640))
|
67 |
|
68 |
|
69 |
-
def detect(image: np.ndarray) -> np.ndarray:
|
70 |
image = image[:, :, ::-1] # RGB -> BGR
|
71 |
bboxes, vbboxes = detect_person(image, detector)
|
72 |
-
|
73 |
-
return
|
74 |
|
75 |
|
76 |
examples = sorted(pathlib.Path("images").glob("*.jpg"))
|
@@ -78,7 +66,7 @@ examples = sorted(pathlib.Path("images").glob("*.jpg"))
|
|
78 |
demo = gr.Interface(
|
79 |
fn=detect,
|
80 |
inputs=gr.Image(label="Input", type="numpy"),
|
81 |
-
outputs=gr.
|
82 |
examples=examples,
|
83 |
examples_per_page=30,
|
84 |
title=TITLE,
|
|
|
1 |
#!/usr/bin/env python
|
2 |
|
3 |
from __future__ import annotations
|
|
|
4 |
import pathlib
|
|
|
5 |
import cv2
|
6 |
import gradio as gr
|
7 |
import huggingface_hub
|
|
|
41 |
return bboxes, vbboxes
|
42 |
|
43 |
|
44 |
+
def extract_persons(image: np.ndarray, bboxes: np.ndarray) -> list[np.ndarray]:
    """Crop each detected person out of *image*.

    Args:
        image: Full frame as an H x W x C pixel array.
        bboxes: Integer boxes, one per row as ``(x1, y1, x2, y2)``
            pixel coordinates (x = column, y = row).

    Returns:
        One cropped sub-image per bounding box, in input order.
    """
    # NOTE: slicing yields views into `image`, not copies — a caller that
    # mutates a crop mutates the source frame as well.
    return [image[y1:y2, x1:x2] for x1, y1, x2, y2 in bboxes]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
51 |
|
52 |
|
53 |
detector = load_model()
|
54 |
detector.prepare(-1, nms_thresh=0.5, input_size=(640, 640))
|
55 |
|
56 |
|
57 |
+
def detect(image: np.ndarray) -> list[np.ndarray]:
    """Detect people in an RGB frame and return an RGB crop per detection."""
    bgr = image[:, :, ::-1]  # detector expects BGR channel order
    bboxes, _vbboxes = detect_person(bgr, detector)
    # Cut every detected person out of the frame, then flip each crop
    # back to RGB for display.
    return [crop[:, :, ::-1] for crop in extract_persons(bgr, bboxes)]
|
62 |
|
63 |
|
64 |
examples = sorted(pathlib.Path("images").glob("*.jpg"))
|
|
|
66 |
demo = gr.Interface(
|
67 |
fn=detect,
|
68 |
inputs=gr.Image(label="Input", type="numpy"),
|
69 |
+
outputs=gr.Gallery(label="Detected Persons"), # Display a gallery of cropped images
|
70 |
examples=examples,
|
71 |
examples_per_page=30,
|
72 |
title=TITLE,
|