AjaykumarPilla committed on
Commit ecfb2f8
verified
1 Parent(s): 03f512c

Update app.py

Files changed (1)
  1. app.py +26 -16
app.py CHANGED
@@ -15,16 +15,16 @@ model.to('cuda' if torch.cuda.is_available() else 'cpu') # Use GPU if available
 # Constants for LBW decision and video processing
 STUMPS_WIDTH = 0.2286 # meters (width of stumps)
 BALL_DIAMETER = 0.073 # meters (approx. cricket ball diameter)
-FRAME_RATE = 20 # Input video frame rate (to be updated dynamically)
+FRAME_RATE = 20 # Default frame rate, updated dynamically
 SLOW_MOTION_FACTOR = 3 # For very slow motion (3x slower)
-CONF_THRESHOLD = 0.4 # Increased confidence threshold for better detection
-IMPACT_ZONE_Y = 0.8 # Adjusted fraction of frame height for impact zone
+CONF_THRESHOLD = 0.25 # Lowered further to improve detection chances
+IMPACT_ZONE_Y = 0.8 # Fraction of frame height for impact zone
 IMPACT_VELOCITY_THRESHOLD = 1000 # Pixels/second for detecting impact
 PITCH_LENGTH = 20.12 # meters (standard cricket pitch length)
 STUMPS_HEIGHT = 0.71 # meters (stumps height)
 CAMERA_HEIGHT = 2.0 # meters (assumed camera height)
 CAMERA_DISTANCE = 10.0 # meters (assumed camera distance from pitch)
-MAX_POSITION_JUMP = 50 # Increased for smoother trajectory
+MAX_POSITION_JUMP = 50 # For smoother trajectory filtering
 
 def process_video(video_path):
     if not os.path.exists(video_path):
@@ -34,6 +34,10 @@ def process_video(video_path):
     frame_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
     frame_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
     FRAME_RATE = cap.get(cv2.CAP_PROP_FPS) or 20 # Use actual frame rate or default
+    # Adjust image size to be multiple of 32 for YOLO
+    stride = 32
+    img_width = ((frame_width + stride - 1) // stride) * stride
+    img_height = ((frame_height + stride - 1) // stride) * stride
     frames = []
     ball_positions = []
     detection_frames = []
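
The four added lines round the capture size up to the nearest multiple of the YOLO stride before it is passed as imgsz. A minimal standalone sketch of that rounding (the helper name and example sizes are illustrative, not part of app.py):

def round_up_to_stride(size: int, stride: int = 32) -> int:
    # Round up so the dimension is divisible by the stride, e.g. 1080 -> 1088, 1920 -> 1920.
    return ((size + stride - 1) // stride) * stride

assert round_up_to_stride(1080) == 1088
assert round_up_to_stride(1920) == 1920

The unchanged FRAME_RATE line above keeps the "or 20" fallback because cap.get(cv2.CAP_PROP_FPS) can return 0 for sources that do not report a frame rate.
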
@@ -46,15 +50,21 @@
             break
         frame_count += 1
         frames.append(frame.copy())
-        # Enhance frame contrast for better detection
-        frame = cv2.convertScaleAbs(frame, alpha=1.2, beta=10)
-        results = model.predict(frame, conf=CONF_THRESHOLD, imgsz=(frame_height, frame_width), iou=0.5, max_det=1)
-        detections = 0
-        for detection in results[0].boxes:
-            if detection.cls == 0: # Class 0 is the ball
-                detections += 1
-                if detections == 1: # Only consider frames with exactly one detection
+        # Enhance frame contrast and sharpness for better detection
+        frame = cv2.convertScaleAbs(frame, alpha=1.5, beta=20)
+        kernel = np.array([[-1, -1, -1], [-1, 9, -1], [-1, -1, -1]])
+        frame = cv2.filter2D(frame, -1, kernel)
+        results = model.predict(frame, conf=CONF_THRESHOLD, imgsz=(img_height, img_width), iou=0.5, max_det=3)
+        detections = sum(1 for detection in results[0].boxes if detection.cls == 0)
+        if detections == 1: # Only process frames with exactly one ball detection
+            for detection in results[0].boxes:
+                if detection.cls == 0: # Class 0 is the ball
                     x1, y1, x2, y2 = detection.xyxy[0].cpu().numpy()
+                    # Scale coordinates back to original frame size
+                    x1 = x1 * frame_width / img_width
+                    x2 = x2 * frame_width / img_width
+                    y1 = y1 * frame_height / img_height
+                    y2 = y2 * frame_height / img_height
                     ball_positions.append([(x1 + x2) / 2, (y1 + y2) / 2])
                     detection_frames.append(frame_count - 1)
                     cv2.rectangle(frame, (int(x1), int(y1)), (int(x2), int(y2)), (0, 255, 0), 2)
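
The replacement block boosts contrast and brightness with cv2.convertScaleAbs and then sharpens with a 3x3 kernel via cv2.filter2D before running detection. A self-contained sketch of that preprocessing step, using the same constants as the diff (the function name is illustrative):

import cv2
import numpy as np

def enhance_frame(frame: np.ndarray) -> np.ndarray:
    # Linear contrast/brightness boost, as in the new code (alpha=1.5, beta=20).
    boosted = cv2.convertScaleAbs(frame, alpha=1.5, beta=20)
    # 3x3 sharpening kernel applied at the same depth as the input (ddepth=-1).
    kernel = np.array([[-1, -1, -1],
                       [-1,  9, -1],
                       [-1, -1, -1]], dtype=np.float32)
    return cv2.filter2D(boosted, -1, kernel)
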
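Because inference now runs at the padded img_width x img_height size while drawing and trajectory estimation use the original frame, the new code scales each box back by the width and height ratios. A simplified sketch of that mapping, under the diff's assumption that detections are reported in the resized image space (the function name is illustrative):

def scale_box_to_frame(x1, y1, x2, y2, img_w, img_h, frame_w, frame_h):
    # Map a box from the (img_w, img_h) inference size back to the original frame size.
    sx = frame_w / img_w
    sy = frame_h / img_h
    return x1 * sx, y1 * sy, x2 * sx, y2 * sy

# Example: a box found at 1920x1088 mapped back to a 1920x1080 frame.
print(scale_box_to_frame(100.0, 544.0, 130.0, 574.0, 1920, 1088, 1920, 1080))
# -> approximately (100.0, 540.0, 130.0, 569.8)

If the YOLO wrapper already returns boxes in original-frame coordinates, this rescaling would be redundant, so it is worth confirming against the model's actual output.
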
@@ -65,9 +75,9 @@
     cap.release()
 
     if not ball_positions:
-        debug_log.append("No balls detected in any frame")
+        debug_log.append("No frames with exactly one ball detection")
     else:
-        debug_log.append(f"Total ball detections: {len(ball_positions)}")
+        debug_log.append(f"Total frames with one ball detection: {len(ball_positions)}")
     debug_log.append(f"Video resolution: {frame_width}x{frame_height}")
     debug_log.append(f"Video frame rate: {FRAME_RATE}")
 
@@ -84,7 +94,7 @@ def pixel_to_3d(x, y, frame_height, frame_width):
 
 def estimate_trajectory(ball_positions, frames, detection_frames):
     if len(ball_positions) < 2:
-        return None, None, None, None, None, None, None, None, None, "Error: Fewer than 2 ball detections for trajectory"
+        return None, None, None, None, None, None, None, None, None, "Error: Fewer than 2 frames with one ball detection"
     frame_height, frame_width = frames[0].shape[:2]
 
     # Filter out sudden changes in position for continuous trajectory
@@ -92,7 +102,7 @@ def estimate_trajectory(ball_positions, frames, detection_frames):
     filtered_frames = [detection_frames[0]]
     for i in range(1, len(ball_positions)):
         prev_pos = filtered_positions[-1]
-        curr_pos = box_positions[i]
+        curr_pos = ball_positions[i]
         distance = np.sqrt((curr_pos[0] - prev_pos[0])**2 + (curr_pos[1] - prev_pos[1])**2)
         if distance <= MAX_POSITION_JUMP:
             filtered_positions.append(curr_pos)
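
The one-line fix above replaces box_positions, which is not the list this loop iterates over, with ball_positions, so the MAX_POSITION_JUMP gate actually runs on the detections. A simplified sketch of that gating filter, dropping the frame-index bookkeeping the real function keeps alongside it (the function name and sample data are illustrative):

import numpy as np

MAX_POSITION_JUMP = 50  # pixels, as in app.py

def filter_jumps(positions):
    # Keep a detection only if it lies within MAX_POSITION_JUMP pixels of the
    # last accepted detection; isolated outliers are dropped.
    if not positions:
        return []
    kept = [positions[0]]
    for curr in positions[1:]:
        prev = kept[-1]
        if np.hypot(curr[0] - prev[0], curr[1] - prev[1]) <= MAX_POSITION_JUMP:
            kept.append(curr)
    return kept

print(filter_jumps([(100, 100), (110, 108), (400, 50), (121, 116)]))
# -> [(100, 100), (110, 108), (121, 116)]  (the (400, 50) outlier is rejected)

Because rejected points are skipped rather than interpolated, a genuine move larger than the threshold would also be discarded, so the 50-pixel limit has to suit the video's frame rate and slow-motion factor.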
 