AjaykumarPilla commited on
Commit
d41a272
·
verified ·
1 Parent(s): 7a2099e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -21
app.py CHANGED
@@ -57,13 +57,21 @@ def process_video(video_path):
57
  def estimate_trajectory(ball_positions, frames):
58
  if len(ball_positions) < 2:
59
  return None, None, None, "Error: Fewer than 2 ball detections for trajectory"
 
60
  frame_height = frames[0].shape[0]
61
-
62
  # Extract x, y coordinates
63
  x_coords = [pos[0] for pos in ball_positions]
64
  y_coords = [pos[1] for pos in ball_positions]
65
  times = np.arange(len(ball_positions)) / FRAME_RATE
66
 
 
 
 
 
 
 
 
67
  # Find impact point (closest to batsman, near stumps)
68
  impact_idx = None
69
  for i, y in enumerate(y_coords):
@@ -72,8 +80,7 @@ def estimate_trajectory(ball_positions, frames):
72
  break
73
  if impact_idx is None:
74
  impact_idx = len(ball_positions) - 1 # Fallback to last detection
75
-
76
- pitch_point = ball_positions[0]
77
  impact_point = ball_positions[impact_idx]
78
 
79
  # Use positions up to impact for interpolation
@@ -132,51 +139,59 @@ def generate_slow_motion(frames, trajectory, pitch_point, impact_point, detectio
132
 
133
  trajectory_points = np.array(trajectory[:len(detection_frames)], dtype=np.int32).reshape((-1, 1, 2))
134
 
 
 
 
135
  for i, frame in enumerate(frames):
136
  # Draw trajectory (blue line) only for frames with detections
137
  if i in detection_frames and trajectory_points.size > 0:
138
  cv2.polylines(frame, [trajectory_points[:detection_frames.index(i) + 1]], False, (255, 0, 0), 2)
139
 
140
- # Draw pitch point (red circle with label) when the ball touches the ground (y < ground threshold)
141
- if pitch_point and impact_point and i >= detection_frames[0]:
142
  x, y = pitch_point
143
- if y > frame.shape[0] * 0.75: # Threshold for ground contact (adjust as necessary)
144
- cv2.circle(frame, (int(x), int(y)), 8, (0, 0, 255), -1)
145
- cv2.putText(frame, "Pitch Point", (int(x) + 10, int(y) - 10),
146
- cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 255), 2)
147
-
148
- # Draw impact point (yellow circle with label) when ball is near stumps (y near bottom)
149
- if impact_point and i >= detection_frames[min(len(detection_frames) - 1, detection_frames.index(detection_frames[-1]))]:
 
 
150
  x, y = impact_point
151
- if y > frame.shape[0] * 0.85: # Threshold for impact (adjust as necessary)
152
- cv2.circle(frame, (int(x), int(y)), 8, (0, 255, 255), -1)
153
- cv2.putText(frame, "Impact Point", (int(x) + 10, int(y) + 20),
154
- cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 255), 2)
155
-
 
 
 
156
  for _ in range(SLOW_MOTION_FACTOR):
157
  out.write(frame)
 
158
  out.release()
159
  return output_path
160
 
161
  def drs_review(video):
162
  frames, ball_positions, detection_frames, debug_log = process_video(video)
163
  if not frames:
164
- return f"Error: Failed to process video\nDebug Log:\n{debug_log}", None
165
  trajectory, pitch_point, impact_point, trajectory_log = estimate_trajectory(ball_positions, frames)
166
  decision, trajectory, pitch_point, impact_point = lbw_decision(ball_positions, trajectory, frames, pitch_point, impact_point)
167
 
168
  output_path = f"output_{uuid.uuid4()}.mp4"
169
  slow_motion_path = generate_slow_motion(frames, trajectory, pitch_point, impact_point, detection_frames, output_path)
170
 
171
- debug_output = f"{debug_log}\n{trajectory_log}"
172
- return f"DRS Decision: {decision}\nDebug Log:\n{debug_output}", slow_motion_path
173
 
174
  # Gradio interface
175
  iface = gr.Interface(
176
  fn=drs_review,
177
  inputs=gr.Video(label="Upload Video Clip"),
178
  outputs=[
179
- gr.Textbox(label="DRS Decision and Debug Log"),
180
  gr.Video(label="Slow-Motion Replay with Ball Detection (Green), Trajectory (Blue Line), Pitch Point (Red), Impact Point (Yellow)")
181
  ],
182
  title="AI-Powered DRS for LBW in Local Cricket",
 
57
  def estimate_trajectory(ball_positions, frames):
58
  if len(ball_positions) < 2:
59
  return None, None, None, "Error: Fewer than 2 ball detections for trajectory"
60
+
61
  frame_height = frames[0].shape[0]
62
+
63
  # Extract x, y coordinates
64
  x_coords = [pos[0] for pos in ball_positions]
65
  y_coords = [pos[1] for pos in ball_positions]
66
  times = np.arange(len(ball_positions)) / FRAME_RATE
67
 
68
+ # Detect the pitch point: find when the ball touches the ground
69
+ pitch_point = None
70
+ for i, y in enumerate(y_coords):
71
+ if y > frame_height * 0.75: # Threshold for ground contact (near the bottom of the frame)
72
+ pitch_point = ball_positions[i]
73
+ break
74
+
75
  # Find impact point (closest to batsman, near stumps)
76
  impact_idx = None
77
  for i, y in enumerate(y_coords):
 
80
  break
81
  if impact_idx is None:
82
  impact_idx = len(ball_positions) - 1 # Fallback to last detection
83
+
 
84
  impact_point = ball_positions[impact_idx]
85
 
86
  # Use positions up to impact for interpolation
 
139
 
140
  trajectory_points = np.array(trajectory[:len(detection_frames)], dtype=np.int32).reshape((-1, 1, 2))
141
 
142
+ pitch_point_detected = False
143
+ impact_point_detected = False
144
+
145
  for i, frame in enumerate(frames):
146
  # Draw trajectory (blue line) only for frames with detections
147
  if i in detection_frames and trajectory_points.size > 0:
148
  cv2.polylines(frame, [trajectory_points[:detection_frames.index(i) + 1]], False, (255, 0, 0), 2)
149
 
150
+ # Draw pitch point (red circle with label) when the ball touches the ground
151
+ if pitch_point and not pitch_point_detected:
152
  x, y = pitch_point
153
+ if y > frame.shape[0] * 0.75: # Adjust this threshold for the ground position
154
+ pitch_point_detected = True
155
+ if pitch_point_detected:
156
+ cv2.circle(frame, (int(x), int(y)), 8, (0, 0, 255), -1)
157
+ cv2.putText(frame, "Pitch Point", (int(x) + 10, int(y) - 10),
158
+ cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 255), 2)
159
+
160
+ # Draw impact point (yellow circle with label) when ball is near stumps
161
+ if impact_point and not impact_point_detected:
162
  x, y = impact_point
163
+ if y > frame.shape[0] * 0.85: # Adjust this threshold for impact point
164
+ impact_point_detected = True
165
+ if impact_point_detected:
166
+ cv2.circle(frame, (int(x), int(y)), 8, (0, 255, 255), -1)
167
+ cv2.putText(frame, "Impact Point", (int(x) + 10, int(y) + 20),
168
+ cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 255), 2)
169
+
170
+ # Write frames to output video
171
  for _ in range(SLOW_MOTION_FACTOR):
172
  out.write(frame)
173
+
174
  out.release()
175
  return output_path
176
 
177
def drs_review(video):
    """Run the end-to-end DRS pipeline on an uploaded video clip.

    Args:
        video: Path to the uploaded video file (as provided by the Gradio
            ``gr.Video`` input component).

    Returns:
        A ``(text, video_path)`` tuple: the DRS decision string and the path
        to the annotated slow-motion replay, or an error message and ``None``
        when no frames could be read from the input.
    """
    # NOTE(review): the debug/trajectory logs are produced but no longer
    # surfaced in the UI after this change; bind them to underscore names
    # so the unused locals are explicit.
    frames, ball_positions, detection_frames, _debug_log = process_video(video)
    if not frames:
        # Bail out early: nothing decoded, so trajectory estimation is impossible.
        return "Error: Failed to process video", None

    trajectory, pitch_point, impact_point, _trajectory_log = estimate_trajectory(ball_positions, frames)
    decision, trajectory, pitch_point, impact_point = lbw_decision(ball_positions, trajectory, frames, pitch_point, impact_point)

    # Unique output name so concurrent Gradio sessions don't overwrite each other.
    output_path = f"output_{uuid.uuid4()}.mp4"
    slow_motion_path = generate_slow_motion(frames, trajectory, pitch_point, impact_point, detection_frames, output_path)

    return f"DRS Decision: {decision}", slow_motion_path
 
188
 
189
  # Gradio interface
190
  iface = gr.Interface(
191
  fn=drs_review,
192
  inputs=gr.Video(label="Upload Video Clip"),
193
  outputs=[
194
+ gr.Textbox(label="DRS Decision"),
195
  gr.Video(label="Slow-Motion Replay with Ball Detection (Green), Trajectory (Blue Line), Pitch Point (Red), Impact Point (Yellow)")
196
  ],
197
  title="AI-Powered DRS for LBW in Local Cricket",