dschandra commited on
Commit
94fc71e
·
verified ·
1 Parent(s): 68f71c4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +148 -34
app.py CHANGED
@@ -1,19 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  from flask import Flask, render_template, request, jsonify
2
  import numpy as np
3
  from sklearn.linear_model import LogisticRegression
4
  import cv2
5
  import os
6
  from werkzeug.utils import secure_filename
 
7
 
8
  app = Flask(__name__)
9
 
10
- # Configure upload folder to use /tmp (writable on Hugging Face Spaces)
11
  UPLOAD_FOLDER = '/tmp/uploads'
12
- os.makedirs(UPLOAD_FOLDER, exist_ok=True) # This should now work in /tmp
13
  app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
14
  ALLOWED_EXTENSIONS = {'mp4', 'avi', 'mov'}
15
 
16
- # Dummy ML model for LBW decision
17
  def train_dummy_model():
18
  X = np.array([
19
  [0.5, 0.0, 0.4, 0.5, 30, 0], # Not Out
@@ -28,30 +108,39 @@ def train_dummy_model():
28
 
29
  model = train_dummy_model()
30
 
31
- # Check allowed file extensions
32
def allowed_file(filename):
    """Return True when *filename* carries an extension listed in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
34
 
35
- # Process video to extract ball trajectory
 
 
 
 
 
 
 
 
 
36
  def process_video(video_path):
37
  cap = cv2.VideoCapture(video_path)
38
  if not cap.isOpened():
39
  return None, None, "Failed to open video"
40
 
41
- # Lists to store trajectory points
42
  actual_path = []
43
  frame_count = 0
44
- total_speed = 0
45
- spin = 0 # Simplified: Assume no spin for now
 
 
 
46
 
47
  while cap.isOpened():
48
  ret, frame = cap.read()
49
  if not ret:
50
  break
51
 
52
- # Convert to HSV and detect ball (assuming a red ball)
53
  hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
54
- mask = cv2.inRange(hsv, (0, 120, 70), (10, 255, 255))
55
  contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
56
 
57
  if contours:
@@ -59,14 +148,33 @@ def process_video(video_path):
59
  x, y, w, h = cv2.boundingRect(c)
60
  center_x = x + w / 2
61
  center_y = y + h / 2
62
-
63
- # Normalize coordinates to 0-1 (assuming 1280x720 video resolution)
64
  norm_x = center_x / 1280
65
  norm_y = center_y / 720
66
- actual_path.append({"x": norm_x, "y": norm_y})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
67
 
68
  frame_count += 1
69
- if frame_count > 30: # Process first 30 frames for simplicity
70
  break
71
 
72
  cap.release()
@@ -74,23 +182,32 @@ def process_video(video_path):
74
  if not actual_path:
75
  return None, None, "No ball detected in video"
76
 
77
- # Assume last point is impact, calculate pitching as midpoint
78
- pitching_x = actual_path[len(actual_path)//2]["x"]
79
- pitching_y = actual_path[len(actual_path)//2]["y"]
80
- impact_x = actual_path[-1]["x"]
81
- impact_y = actual_path[-1]["y"]
 
 
82
 
83
- # Simulate speed (frames per second to m/s, rough estimate)
84
  fps = cap.get(cv2.CAP_PROP_FPS) or 30
85
- speed = (len(actual_path) / (frame_count / fps)) * 0.5 # Simplified conversion
86
 
87
- # Projected path (linear from impact to stumps, adjusted for spin)
 
 
 
88
  projected_path = [
89
  {"x": impact_x, "y": impact_y},
90
- {"x": impact_x + spin * 0.1, "y": 1.0} # Stumps at y=1.0
91
  ]
92
 
93
- return actual_path, projected_path, pitching_x, pitching_y, impact_x, impact_y, speed, spin
 
 
 
 
 
94
 
95
  @app.route('/')
96
  def index():
@@ -105,26 +222,22 @@ def analyze():
105
  if file.filename == '' or not allowed_file(file.filename):
106
  return jsonify({'error': 'Invalid file'}), 400
107
 
108
- # Save the uploaded video
109
  filename = secure_filename(file.filename)
110
  video_path = os.path.join(app.config['UPLOAD_FOLDER'], filename)
111
  file.save(video_path)
112
 
113
- # Process video
114
  result = process_video(video_path)
115
  if result[0] is None:
116
- os.remove(video_path) # Clean up
117
- return jsonify({'error': result[2]}), 400 # result[2] holds error message
118
 
119
- actual_path, projected_path, pitching_x, pitching_y, impact_x, impact_y, speed, spin = result
120
 
121
- # Predict LBW decision
122
  features = np.array([[pitching_x, pitching_y, impact_x, impact_y, speed, spin]])
123
  prediction = model.predict(features)[0]
124
- confidence = model.predict_proba(features)[0][prediction]
125
  decision = "Out" if prediction == 1 else "Not Out"
126
 
127
- # Clean up
128
  os.remove(video_path)
129
 
130
  return jsonify({
@@ -132,8 +245,9 @@ def analyze():
132
  'projected_path': projected_path,
133
  'decision': decision,
134
  'confidence': round(confidence, 2),
135
- 'pitching': {'x': pitching_x, 'y': pitching_y},
136
- 'impact': {'x': impact_x, 'y': impact_y}
 
137
  })
138
 
139
  if __name__ == '__main__':
 
1
+ I understand your concern. You're expecting a real-time DRS (Decision Review System) output similar to what you see in international T20 cricket matches, with precise ball tracking, pitch analysis, and wicket-hitting prediction, as used by third umpires. The current output—while functional—falls short of that professional standard in terms of accuracy, visualization, and real-time capability. Let’s break down the gap between your expectations and the current output, then outline how to achieve a more professional DRS-like system.
2
+
3
+ Understanding Your Expectation
4
+ In international T20 matches, the DRS (e.g., Hawk-Eye) provides:
5
+
6
+ Real-Time Ball Tracking:
7
+ High-speed cameras (e.g., 6–8 cameras at 500+ fps) track the ball from the bowler’s release to impact.
8
+ A smooth 3D trajectory is shown, often as a curved path, accounting for swing, spin, and bounce.
9
+ Pitch Analysis:
10
+ The system identifies the exact pitching point (where the ball bounces) and checks where it pitched relative to the stumps (in line, outside leg, or outside off).
11
+ A 2D or 3D pitch map shows the pitching point relative to the stumps.
12
+ Wicket-Hitting Prediction:
13
+ The system predicts the ball’s path after impact (e.g., hitting the pad) to determine if it would hit the stumps.
14
+ A 3D visualization shows the projected path, often with a “stump cam” view indicating whether the ball would hit, miss, or clip the stumps.
15
+ Detailed Decision:
16
+ The DRS provides a step-by-step breakdown: pitching (inline/outside), impact (inline/outside), and wicket-hitting (yes/no).
17
+ A final decision (“Out” or “Not Out”) is shown with high accuracy (99.9% in professional systems).
18
+ Professional Visualization:
19
+ Smooth animations of the ball’s path (curved, not linear).
20
+ 3D renderings of the pitch, stumps, and trajectory.
21
+ Clear annotations (e.g., “Ball Pitching: Inline,” “Impact: Inline,” “Wickets: Hitting”).
22
+ The current app output, while a starting point, has these shortcomings:
23
+
24
+ Simplified Tracking: It uses basic OpenCV to detect the ball in a pre-uploaded video, not real-time camera feeds.
25
+ Linear Trajectories: The red (actual) and blue (projected) paths are linear, not curved, and don’t account for swing or spin.
26
+ Basic Pitch Analysis: Pitching and impact points are estimated roughly, not precisely detected.
27
+ 2D Visualization: The visualization is a flat 2D canvas, lacking the 3D depth and smoothness of professional DRS.
28
+ Dummy ML Model: The LBW decision uses a simplistic logistic regression model, not a robust ML system trained on real cricket data.
29
+ Not Real-Time: The app processes uploaded videos, not live camera feeds.
30
+ How to Achieve a Real-Time DRS-Like System
31
+ To meet your expectations, we need to upgrade the app to handle real-time ball tracking, accurate pitch analysis, wicket-hitting prediction, and professional-grade visualization. Here’s a step-by-step plan, followed by updated code to move closer to your goal.
32
+
33
+ Key Requirements for a DRS-Like System
34
+ Real-Time Camera Integration:
35
+ Use multiple high-speed cameras (e.g., 120–240 fps) to capture the ball’s movement live.
36
+ Process camera feeds in real-time to track the ball.
37
+ Note: Hugging Face Spaces cannot access cameras (no webcam support), so this must be done locally or on a server with camera access.
38
+ Advanced Ball Tracking:
39
+ Use computer vision (OpenCV) to detect the ball in each frame.
40
+ Apply trajectory smoothing (e.g., spline interpolation) to create a curved path.
41
+ Detect swing (lateral deviation) and spin (rotation).
42
+ Pitch and Impact Detection:
43
+ Identify the pitching point by detecting the ball’s bounce (sudden change in y-coordinate).
44
+ Detect the impact point by identifying when the ball stops (e.g., hits the pad, often with a sudden slowdown).
45
+ Use pitch markings (e.g., creases, stumps) to determine if pitching/impact is inline.
46
+ Wicket-Hitting Prediction:
47
+ Model the ball’s physics (swing, spin, bounce) to predict the post-impact path.
48
+ Use an ML model to refine predictions based on historical data.
49
+ Professional Visualization:
50
+ Use a 3D rendering library (e.g., Three.js) for realistic trajectory visualization.
51
+ Show a detailed breakdown (pitching, impact, wicket-hitting) with annotations.
52
+ Robust ML Model:
53
+ Train a deep learning model (e.g., CNN) on real cricket video data to predict LBW outcomes.
54
+ Host the model on Hugging Face Model Hub for inference.
55
+ Challenges
56
+ Hardware: Professional DRS uses 6–8 high-speed cameras ($100,000+ setup). For a playground, 2–4 consumer cameras (e.g., GoPro, $500–$2,000 each) can work but reduce accuracy.
57
+ Real-Time Processing: Processing multiple camera feeds in real-time requires significant compute power (e.g., GPU server). Hugging Face Spaces (free tier: 2 vCPUs, 8GB RAM) can’t handle this; you’ll need a local setup or cloud server (e.g., AWS).
58
+ Data: Training an ML model for LBW requires labeled cricket video data (scarce publicly). You may need to collect and annotate your own dataset.
59
+ Visualization: 3D rendering (e.g., with Three.js) is more complex than the current 2D Canvas and requires additional setup.
60
+ Plan to Achieve DRS-Like Output
61
+ Since real-time camera integration and 3D visualization are complex, let’s break this into phases. For now, we’ll enhance the app to:
62
+
63
+ Improve ball tracking with smoother trajectories.
64
+ Add detailed LBW analysis (pitching, impact, wicket-hitting breakdown).
65
+ Upgrade visualization to show a more professional 2D output (we’ll add 3D later).
66
+ Provide guidance for real-time setup (local or cloud).
67
+ Updated Code
68
+ app.py
69
+ This version improves ball tracking, adds detailed LBW analysis, and prepares for real-time integration.
70
+
71
+ python
72
+
73
+ Collapse
74
+
75
+ Wrap
76
+
77
+ Run
78
+
79
+ Copy
80
  from flask import Flask, render_template, request, jsonify
81
  import numpy as np
82
  from sklearn.linear_model import LogisticRegression
83
  import cv2
84
  import os
85
  from werkzeug.utils import secure_filename
86
+ from scipy.interpolate import splprep, splev # For smooth trajectory
87
 
88
  app = Flask(__name__)
89
 
90
+ # Configure upload folder
91
  UPLOAD_FOLDER = '/tmp/uploads'
92
+ os.makedirs(UPLOAD_FOLDER, exist_ok=True)
93
  app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
94
  ALLOWED_EXTENSIONS = {'mp4', 'avi', 'mov'}
95
 
96
+ # Dummy ML model for LBW decision (to be replaced with a real model)
97
  def train_dummy_model():
98
  X = np.array([
99
  [0.5, 0.0, 0.4, 0.5, 30, 0], # Not Out
 
108
 
109
  model = train_dummy_model()
110
 
 
111
def allowed_file(filename):
    """Return True when *filename* carries an extension listed in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
113
 
114
def smooth_trajectory(points):
    """Return a spline-smoothed, resampled copy of a ball trajectory.

    Parameters
    ----------
    points : list[dict]
        Trajectory samples as ``{"x": float, "y": float}`` in normalized
        (0-1) image coordinates, in temporal order.

    Returns
    -------
    list[dict]
        50 points resampled along an interpolating B-spline, or the
        input unchanged when there are too few (or degenerate) samples
        to fit a spline.
    """
    # splprep's default spline degree is k=3 and it requires strictly
    # more than k points, so anything under 4 samples cannot be fitted.
    if len(points) < 4:
        return points
    xs = [p["x"] for p in points]
    ys = [p["y"] for p in points]
    try:
        # s=0 -> interpolating spline that passes through every sample.
        tck, _ = splprep([xs, ys], s=0)
    except (ValueError, TypeError):
        # Degenerate input (e.g. repeated identical points) makes the
        # fit impossible; fall back to the raw detected path.
        return points
    u_new = np.linspace(0, 1, 50)  # resample to 50 evenly spaced points
    x_new, y_new = splev(u_new, tck)
    # Cast np.float64 to plain float so Flask's jsonify can serialize it.
    return [{"x": float(px), "y": float(py)} for px, py in zip(x_new, y_new)]
123
+
124
  def process_video(video_path):
125
  cap = cv2.VideoCapture(video_path)
126
  if not cap.isOpened():
127
  return None, None, "Failed to open video"
128
 
 
129
  actual_path = []
130
  frame_count = 0
131
+ spin = 0
132
+ last_point = None
133
+ pitching_detected = False
134
+ impact_detected = False
135
+ y_positions = []
136
 
137
  while cap.isOpened():
138
  ret, frame = cap.read()
139
  if not ret:
140
  break
141
 
 
142
  hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
143
+ mask = cv2.inRange(hsv, (0, 120, 70), (10, 255, 255)) # Adjust for your ball color
144
  contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
145
 
146
  if contours:
 
148
  x, y, w, h = cv2.boundingRect(c)
149
  center_x = x + w / 2
150
  center_y = y + h / 2
 
 
151
  norm_x = center_x / 1280
152
  norm_y = center_y / 720
153
+ current_point = (norm_x, norm_y)
154
+
155
+ if last_point != current_point:
156
+ actual_path.append({"x": norm_x, "y": norm_y})
157
+ y_positions.append(norm_y)
158
+ last_point = current_point
159
+
160
+ # Detect pitching (first significant downward movement)
161
+ if len(y_positions) > 2 and not pitching_detected:
162
+ if y_positions[-1] < y_positions[-2] and y_positions[-2] < y_positions[-3]:
163
+ pitching_detected = True
164
+ pitching_x = actual_path[-2]["x"]
165
+ pitching_y = actual_path[-2]["y"]
166
+
167
+ # Detect impact (sudden slowdown or stop)
168
+ if len(actual_path) > 2 and not impact_detected:
169
+ speed_current = abs(y_positions[-1] - y_positions[-2])
170
+ speed_prev = abs(y_positions[-2] - y_positions[-3])
171
+ if speed_current < speed_prev * 0.3: # Significant slowdown
172
+ impact_detected = True
173
+ impact_x = actual_path[-1]["x"]
174
+ impact_y = actual_path[-1]["y"]
175
 
176
  frame_count += 1
177
+ if frame_count > 50: # Process more frames for accuracy
178
  break
179
 
180
  cap.release()
 
182
  if not actual_path:
183
  return None, None, "No ball detected in video"
184
 
185
+ if not pitching_detected:
186
+ pitching_x = actual_path[len(actual_path)//2]["x"]
187
+ pitching_y = actual_path[len(actual_path)//2]["y"]
188
+
189
+ if not impact_detected:
190
+ impact_x = actual_path[-1]["x"]
191
+ impact_y = actual_path[-1]["y"]
192
 
 
193
  fps = cap.get(cv2.CAP_PROP_FPS) or 30
194
+ speed = (len(actual_path) / (frame_count / fps)) * 0.5
195
 
196
+ # Smooth the actual path
197
+ actual_path = smooth_trajectory(actual_path)
198
+
199
+ # Projected path with basic physics (linear for now, add swing/spin later)
200
  projected_path = [
201
  {"x": impact_x, "y": impact_y},
202
+ {"x": impact_x + spin * 0.1, "y": 1.0}
203
  ]
204
 
205
+ # Determine pitching and impact status
206
+ pitching_status = "Inline" if 0.4 <= pitching_x <= 0.6 else "Outside Leg" if pitching_x < 0.4 else "Outside Off"
207
+ impact_status = "Inline" if 0.4 <= impact_x <= 0.6 else "Outside"
208
+ wicket_status = "Hitting" if 0.4 <= projected_path[-1]["x"] <= 0.6 else "Missing"
209
+
210
+ return actual_path, projected_path, pitching_x, pitching_y, impact_x, impact_y, speed, spin, pitching_status, impact_status, wicket_status
211
 
212
  @app.route('/')
213
  def index():
 
222
  if file.filename == '' or not allowed_file(file.filename):
223
  return jsonify({'error': 'Invalid file'}), 400
224
 
 
225
  filename = secure_filename(file.filename)
226
  video_path = os.path.join(app.config['UPLOAD_FOLDER'], filename)
227
  file.save(video_path)
228
 
 
229
  result = process_video(video_path)
230
  if result[0] is None:
231
+ os.remove(video_path)
232
+ return jsonify({'error': result[2]}), 400
233
 
234
+ actual_path, projected_path, pitching_x, pitching_y, impact_x, impact_y, speed, spin, pitching_status, impact_status, wicket_status = result
235
 
 
236
  features = np.array([[pitching_x, pitching_y, impact_x, impact_y, speed, spin]])
237
  prediction = model.predict(features)[0]
238
+ confidence = min(model.predict_proba(features)[0][prediction], 0.99)
239
  decision = "Out" if prediction == 1 else "Not Out"
240
 
 
241
  os.remove(video_path)
242
 
243
  return jsonify({
 
245
  'projected_path': projected_path,
246
  'decision': decision,
247
  'confidence': round(confidence, 2),
248
+ 'pitching': {'x': pitching_x, 'y': pitching_y, 'status': pitching_status},
249
+ 'impact': {'x': impact_x, 'y': impact_y, 'status': impact_status},
250
+ 'wicket': wicket_status
251
  })
252
 
253
  if __name__ == '__main__':