Update app.py
app.py CHANGED
@@ -148,99 +148,3 @@ interface = gr.Interface(
 if __name__ == "__main__":
     interface.launch()
 
-"""""
-import gradio as gr
-import cv2
-import numpy as np
-import os
-import torch
-from ultralytics import YOLO
-import spaces  # Import ZeroGPU for Hugging Face Spaces
-
-@spaces.GPU  # Ensures GPU is allocated during execution
-def process_video(video_path):
-    """Process video using YOLOv8 for crowd detection."""
-
-    # Check if CUDA is available
-    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
-    print(f"🔍 Using device: {device}")
-
-    # Load YOLOv8 model on GPU
-    model = YOLO("yolov8n.pt").to(device)
-
-    # Read input video
-    cap = cv2.VideoCapture(video_path)
-    if not cap.isOpened():
-        raise ValueError(f"❌ Failed to open video: {video_path}")
-
-    fps = int(cap.get(cv2.CAP_PROP_FPS))
-    width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
-    height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
-
-    print(f"🎥 Video details - FPS: {fps}, Width: {width}, Height: {height}")
-
-    # Define output video path
-    output_path = "output_crowd.mp4"
-    fourcc = cv2.VideoWriter_fourcc(*"mp4v")
-    out = cv2.VideoWriter(output_path, fourcc, fps, (width, height))
-
-    CROWD_THRESHOLD = 10  # Define crowd limit for alerts
-    frame_count = 0
-
-    while cap.isOpened():
-        ret, frame = cap.read()
-        if not ret:
-            break  # End of video
-
-        frame_count += 1
-
-        # Run YOLO inference on the frame
-        results = model(frame)
-
-        # Count detected persons
-        person_count = 0
-        for result in results:
-            boxes = result.boxes.xyxy.cpu().numpy()
-            classes = result.boxes.cls.cpu().numpy()
-
-            for box, cls in zip(boxes, classes):
-                if int(cls) == 0:  # YOLO class ID 0 = "person"
-                    person_count += 1
-                    x1, y1, x2, y2 = map(int, box)
-
-                    # Draw bounding box for persons
-                    cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 2)
-                    cv2.putText(frame, "Person", (x1, y1 - 10),
-                                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
-
-        # Display count on frame
-        alert_text = "Crowd Alert!" if person_count > CROWD_THRESHOLD else f"People: {person_count}"
-        cv2.putText(frame, alert_text, (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 1,
-                    (0, 0, 255) if person_count > CROWD_THRESHOLD else (0, 255, 0), 2)
-
-        out.write(frame)  # Save frame to output video
-
-    cap.release()
-    out.release()
-
-    if frame_count == 0:
-        raise ValueError("❌ No frames were processed!")
-
-    if not os.path.exists(output_path):
-        raise FileNotFoundError(f"❌ Output video not found: {output_path}")
-
-    print(f"✅ Processed video saved at: {output_path}")
-    return output_path
-
-# Gradio Interface
-interface = gr.Interface(
-    fn=process_video,
-    inputs=gr.Video(label="Upload Video"),
-    outputs=gr.Video(label="Processed Video"),
-    title="Crowd Detection with YOLOv8",
-    description="Upload a video, and YOLOv8 will detect and count people. If the crowd exceeds 10 people, a warning will be displayed."
-)
-
-if __name__ == "__main__":
-    interface.launch()
-""""