Saad0KH committed on
Commit
3a83ac2
·
verified ·
1 Parent(s): defa56c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -22
app.py CHANGED
@@ -1,9 +1,7 @@
1
  #!/usr/bin/env python
2
 
3
  from __future__ import annotations
4
-
5
  import pathlib
6
-
7
  import cv2
8
  import gradio as gr
9
  import huggingface_hub
@@ -43,34 +41,24 @@ def detect_person(
43
  return bboxes, vbboxes
44
 
45
 
46
def visualize(image: np.ndarray, bboxes: np.ndarray, vbboxes: np.ndarray) -> np.ndarray:
    """Return a copy of *image* annotated with detections.

    Each full-person box is outlined in green; each visible-region box is
    tinted toward blue (BGR (255, 0, 0)) and its four corners are dotted.
    Assumes bboxes and vbboxes are (N, 4) integer arrays of equal length,
    each row being (x1, y1, x2, y2).
    """
    canvas = image.copy()
    blend = 0.8                 # weight kept from the underlying pixels
    tint = (255, 0, 0)          # BGR blue, used for tint and corner dots
    for (x1, y1, x2, y2), (vx1, vy1, vx2, vy2) in zip(bboxes, vbboxes):
        cv2.rectangle(canvas, (x1, y1), (x2, y2), (0, 255, 0), 1)
        # Alpha-blend the visible-region patch, one channel at a time.
        for ch in range(3):
            canvas[vy1:vy2, vx1:vx2, ch] = (
                canvas[vy1:vy2, vx1:vx2, ch] * blend + tint[ch] * (1.0 - blend)
            )
        # Dot the four corners of the visible-region box.
        for cx, cy in ((vx1, vy1), (vx1, vy2), (vx2, vy1), (vx2, vy2)):
            cv2.circle(canvas, (cx, cy), 1, tint, 2)
    return canvas
63
 
64
 
65
  # Module-level side effect: the detector singleton is built once at import time.
  detector = load_model()
66
  # NOTE(review): first positional arg is presumably ctx_id; -1 usually selects
  # CPU inference in insightface — TODO confirm against the insightface docs.
  detector.prepare(-1, nms_thresh=0.5, input_size=(640, 640))
67
 
68
 
69
def detect(image: np.ndarray) -> np.ndarray:
    """Detect persons in an RGB image and return an RGB visualization."""
    bgr = image[:, :, ::-1]  # gradio supplies RGB; the detector works on BGR
    bboxes, vbboxes = detect_person(bgr, detector)
    annotated = visualize(bgr, bboxes, vbboxes)
    # Flip the channel order back so gradio renders correct colors.
    return annotated[:, :, ::-1]
74
 
75
 
76
  examples = sorted(pathlib.Path("images").glob("*.jpg"))
@@ -78,7 +66,7 @@ examples = sorted(pathlib.Path("images").glob("*.jpg"))
78
  demo = gr.Interface(
79
  fn=detect,
80
  inputs=gr.Image(label="Input", type="numpy"),
81
- outputs=gr.Image(label="Output"),
82
  examples=examples,
83
  examples_per_page=30,
84
  title=TITLE,
 
1
  #!/usr/bin/env python
2
 
3
  from __future__ import annotations
 
4
  import pathlib
 
5
  import cv2
6
  import gradio as gr
7
  import huggingface_hub
 
41
  return bboxes, vbboxes
42
 
43
 
44
def extract_persons(image: np.ndarray, bboxes: np.ndarray) -> list[np.ndarray]:
    """Crop each detected person out of *image*.

    Args:
        image: H x W x C image array.
        bboxes: (N, 4) integer boxes, each row (x1, y1, x2, y2).

    Returns:
        One cropped array per box (numpy views into *image*, not copies).

    Detector boxes can extend past the frame; coordinates are clipped to the
    image bounds first, because a negative index would wrap around in numpy
    slicing and yield an empty or wrong crop. In-bounds boxes are unaffected.
    """
    height, width = image.shape[:2]
    person_images: list[np.ndarray] = []
    for x1, y1, x2, y2 in bboxes:
        x1 = max(int(x1), 0)
        y1 = max(int(y1), 0)
        x2 = min(int(x2), width)
        y2 = min(int(y2), height)
        person_images.append(image[y1:y2, x1:x2])
    return person_images
 
 
 
 
 
 
 
 
51
 
52
 
53
  detector = load_model()
54
  detector.prepare(-1, nms_thresh=0.5, input_size=(640, 640))
55
 
56
 
57
def detect(image: np.ndarray) -> list[np.ndarray]:
    """Detect persons in an RGB frame and return one RGB crop per person."""
    bgr = image[:, :, ::-1]  # gradio delivers RGB; the detector consumes BGR
    bboxes, vbboxes = detect_person(bgr, detector)
    # Cut each detected person out of the BGR frame, then flip the channel
    # order back so gradio displays the crops with correct colors.
    crops = extract_persons(bgr, bboxes)
    return [crop[:, :, ::-1] for crop in crops]
62
 
63
 
64
  examples = sorted(pathlib.Path("images").glob("*.jpg"))
 
66
  demo = gr.Interface(
67
  fn=detect,
68
  inputs=gr.Image(label="Input", type="numpy"),
69
+ outputs=gr.Gallery(label="Detected Persons"), # Display a gallery of cropped images
70
  examples=examples,
71
  examples_per_page=30,
72
  title=TITLE,