Update app.py
app.py CHANGED
@@ -4,7 +4,6 @@ import torch
 from ultralytics import YOLO
 import gradio as gr
 from scipy.interpolate import interp1d
-import plotly.graph_objects as go
 import uuid
 import os
 
@@ -45,8 +44,8 @@ def process_video(video_path):
             break
         frame_count += 1
         frames.append(frame.copy())
-        # Use
-        results = model.predict(frame, conf=CONF_THRESHOLD, imgsz=(
+        # Use smaller image size to speed up detection
+        results = model.predict(frame, conf=CONF_THRESHOLD, imgsz=(384, 640), iou=0.5, max_det=1)
         detections = 0
         for detection in results[0].boxes:
             if detection.cls == 0:  # Class 0 is the ball
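For context on the changed call: conf, imgsz, iou, and max_det are all standard arguments of ultralytics' predict(). A minimal, self-contained sketch of the updated detection step, assuming hypothetical weights at "ball.pt" and CONF_THRESHOLD = 0.3 (neither value appears in this diff):

import cv2
from ultralytics import YOLO

model = YOLO("ball.pt")        # hypothetical weights path
CONF_THRESHOLD = 0.3           # assumed threshold, not shown in the diff

cap = cv2.VideoCapture("delivery.mp4")   # hypothetical input clip
ok, frame = cap.read()
if ok:
    # imgsz=(384, 640) runs inference at a reduced (height, width),
    # trading a little accuracy for speed; max_det=1 keeps only the
    # single highest-confidence box, i.e. the ball.
    results = model.predict(frame, conf=CONF_THRESHOLD,
                            imgsz=(384, 640), iou=0.5, max_det=1)
    for detection in results[0].boxes:
        if detection.cls == 0:  # class 0 is the ball
            x1, y1, x2, y2 = detection.xyxy[0].tolist()
cap.release()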
@@ -146,7 +145,7 @@ def estimate_trajectory(ball_positions, frames, detection_frames):
 
     trajectory_3d = [pixel_to_3d(x, y, frame_height, frame_width) for x, y in trajectory_2d]
     detections_3d = [pixel_to_3d(x, y, frame_height, frame_width) for x, y in filtered_positions]
-    
+
     # Handle missing pitch and impact points gracefully
     pitch_point_3d = pixel_to_3d(pitch_point[0], pitch_point[1], frame_height, frame_width) if pitch_point else None
     impact_point_3d = pixel_to_3d(impact_point[0], impact_point[1], frame_height, frame_width) if impact_point else None
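The conditional mapping in this hunk is the graceful-handling pattern the comment names: optional 2D points go through pixel_to_3d only when they exist, and stay None otherwise. A sketch with a hypothetical placeholder projection (the real pixel_to_3d is defined elsewhere in app.py and may differ):

def pixel_to_3d(x, y, frame_height, frame_width):
    # Hypothetical stand-in: normalize pixel coordinates onto a unit
    # ground plane at zero height; app.py's real mapping may differ.
    return (x / frame_width, y / frame_height, 0.0)

pitch_point = (412, 903)   # example pixel coordinates
impact_point = None        # e.g. the ball never reached the batter

pitch_point_3d = pixel_to_3d(pitch_point[0], pitch_point[1], 720, 1280) if pitch_point else None
impact_point_3d = pixel_to_3d(impact_point[0], impact_point[1], 720, 1280) if impact_point else None
assert impact_point_3d is None   # missing points propagate as None instead of raising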
@@ -249,17 +248,9 @@ def drs_review(video):
     output_path = f"output_{uuid.uuid4()}.mp4"
     slow_motion_path = generate_slow_motion(frames, trajectory_2d, pitch_point, impact_point, detection_frames, pitch_frame, impact_frame, output_path)
 
-    detections_fig = None
-    trajectory_fig = None
-    if detections_3d:
-        detections_fig = create_3d_plot(detections_3d, trajectory_3d, pitch_point_3d, impact_point_3d, "detections")
-        trajectory_fig = create_3d_plot(detections_3d, trajectory_3d, pitch_point_3d, impact_point_3d, "trajectory")
-
     debug_output = f"{debug_log}\n{trajectory_log}"
     return (f"DRS Decision: {decision}\nDebug Log:\n{debug_output}",
-            slow_motion_path
-            detections_fig,
-            trajectory_fig)
+            slow_motion_path)
 
 # Gradio interface
 iface = gr.Interface(
@@ -268,11 +259,9 @@ iface = gr.Interface(
     outputs=[
         gr.Textbox(label="DRS Decision and Debug Log"),
         gr.Video(label="Very Slow-Motion Replay with Ball Detection (Green), Trajectory (Blue Line), Pitch Point (Red), Impact Point (Yellow)"),
-        gr.Plot(label="3D Single Ball Detections Plot"),
-        gr.Plot(label="3D Ball Trajectory Plot (Single Detections)")
     ],
     title="AI-Powered DRS for LBW in Local Cricket",
-    description="Upload a video clip of a cricket delivery to get an LBW decision, a slow-motion replay, and 3D visualizations. The replay shows ball detection (green boxes), trajectory (blue line), pitch point (red circle), and impact point (yellow circle).
+    description="Upload a video clip of a cricket delivery to get an LBW decision, a slow-motion replay, and 3D visualizations. The replay shows ball detection (green boxes), trajectory (blue line), pitch point (red circle), and impact point (yellow circle)."
 )
 
 if __name__ == "__main__":
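Taken together, the last two hunks keep the function's return tuple and the interface's outputs list in lockstep: Gradio matches returned values to output components positionally, so dropping the two gr.Plot components requires dropping the two figure return values. A minimal sketch of the resulting two-output wiring, with a stub standing in for the real pipeline:

import gradio as gr

def drs_review_stub(video):
    # Stand-in for the real drs_review: returns one value per output
    # component, in order (Textbox text, then Video file path).
    decision_text = "DRS Decision: NOT OUT\nDebug Log:\n(stub)"
    return decision_text, video

iface = gr.Interface(
    fn=drs_review_stub,
    inputs=gr.Video(label="Delivery Clip"),
    outputs=[
        gr.Textbox(label="DRS Decision and Debug Log"),
        gr.Video(label="Slow-Motion Replay"),
    ],
    title="AI-Powered DRS for LBW in Local Cricket",
)

if __name__ == "__main__":
    iface.launch()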