AjaykumarPilla committed
Commit abc7c13 · verified · 1 Parent(s): 7f15ea1

Create app.py

Files changed (1)
app.py +148 -0
app.py ADDED
@@ -0,0 +1,148 @@
import gradio as gr
import cv2
import numpy as np
from ultralytics import YOLO
import tensorflow as tf
import math
import os
from pathlib import Path

# Configuration
PITCH_LENGTH = 20.12  # Pitch length in meters
PITCH_WIDTH = 3.05    # Pitch width in meters
STUMP_HEIGHT = 0.71   # Stump height in meters
STUMP_WIDTH = 0.23    # Stump width in meters
FPS = 60              # Frames per second of video
OUTPUT_DIR = "outputs"
os.makedirs(OUTPUT_DIR, exist_ok=True)

# Load fine-tuned YOLOv8 model
model = YOLO('best.pt')
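
# pixel_to_world maps a pixel coordinate onto a simple stump-centred world frame:
# the frame width is assumed to span the full pitch length and the frame height the
# pitch width. Ball height (world_y) is held at a constant 0.5 m rather than being
# estimated from the image, so this is a rough calibration, not a camera model.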
def pixel_to_world(x, y, frame_width, frame_height):
    world_x = -PITCH_LENGTH/2 + (x / frame_width) * PITCH_LENGTH
    world_z = -(y / frame_height) * PITCH_WIDTH
    world_y = 0.5  # Assume ball height
    return world_x, world_y, world_z
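
# predict_trajectory fits first-degree (straight-line) polynomials to the tracked
# positions over frame index and extrapolates to the stump plane at
# x = PITCH_LENGTH / 2. The linear fit ignores gravity, swing and spin, so the
# returned (height, lateral offset) pair is only a rough estimate.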
def predict_trajectory(positions):
    if len(positions) < 2:
        return None
    x = np.array([p[0] for p in positions])
    y = np.array([p[1] for p in positions])
    z = np.array([p[2] for p in positions])
    t = np.arange(len(positions))

    px = np.polyfit(t, x, 1)
    py = np.polyfit(t, y, 1)
    pz = np.polyfit(t, z, 1)

    t_stump = (PITCH_LENGTH/2 - x[-1]) / px[0] if px[0] != 0 else 0
    pred_y = py[0] * t_stump + y[-1]
    pred_z = pz[0] * t_stump + z[-1]
    return pred_y, pred_z
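
# lbw_decision applies three simplified DRS-style checks: the ball pitched in line
# with the stumps, the impact on the batsman was in line, and the extrapolated
# trajectory would hit the stumps. If any check fails, the delivery is NOT OUT.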
def lbw_decision(pitch_impact, impact_point, stump_prediction):
    if pitch_impact is None:
        # No pitch-bounce point was detected; avoid indexing None below.
        return "NOT OUT", "Pitch impact point could not be detected."
    pitching_in_line = -STUMP_WIDTH/2 <= pitch_impact[2] <= STUMP_WIDTH/2
    impact_in_line = -STUMP_WIDTH/2 <= impact_point[2] <= STUMP_WIDTH/2 if impact_point else True
    hitting_stumps = (stump_prediction is not None and
                      0 <= stump_prediction[0] <= STUMP_HEIGHT and
                      -STUMP_WIDTH/2 <= stump_prediction[1] <= STUMP_WIDTH/2)

    if pitching_in_line and impact_in_line and hitting_stumps:
        return "OUT", "Ball pitched in line, hit in line, would hit stumps."
    else:
        reasons = []
        if not pitching_in_line:
            reasons.append("Ball pitched outside line.")
        if not impact_in_line:
            reasons.append("Impact outside line.")
        if not hitting_stumps:
            reasons.append("Ball would not hit stumps.")
        return "NOT OUT", " ".join(reasons)
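
# process_video is the per-frame pipeline: run YOLO on each frame, convert the
# detection centre to world coordinates, mark the pitch-bounce and batsman-impact
# points, then extrapolate the trajectory and apply the LBW checks. Annotated
# frames are written to outputs/output_video.mp4.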
def process_video(video_path):
    # Initialize video capture
    cap = cv2.VideoCapture(video_path)
    if not cap.isOpened():
        return "Error: Could not open video.", "", ""

    frame_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
    frame_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
    output_path = os.path.join(OUTPUT_DIR, "output_video.mp4")
    fourcc = cv2.VideoWriter_fourcc(*'mp4v')
    out = cv2.VideoWriter(output_path, fourcc, FPS, (frame_width, frame_height))

    ball_positions = []
    pitch_impact = None
    impact_point = None
    frame_count = 0
    last_frame = None  # Keep the last successfully read frame for the final annotation

    while cap.isOpened():
        ret, frame = cap.read()
        if not ret:
            break
        last_frame = frame

        frame_count += 1
        results = model(frame, conf=0.3)
        ball_detected = False

        for result in results:
            boxes = result.boxes.xyxy.cpu().numpy()  # .cpu() so this also works when inference runs on GPU
            for box in boxes:
                x1, y1, x2, y2 = box[:4]
                center_x, center_y = (x1 + x2) / 2, (y1 + y2) / 2

                world_x, world_y, world_z = pixel_to_world(center_x, center_y, frame_width, frame_height)
                ball_positions.append((world_x, world_y, world_z))
                ball_detected = True

                cv2.rectangle(frame, (int(x1), int(y1)), (int(x2), int(y2)), (0, 255, 0), 2)

                if world_y < 0.2 and pitch_impact is None:
                    pitch_impact = (world_x, world_y, world_z)
                    cv2.circle(frame, (int(center_x), int(center_y)), 5, (0, 0, 255), -1)
                    cv2.putText(frame, "Pitch Impact", (int(center_x), int(center_y) - 10),
                                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)

                if pitch_impact is not None and world_x > PITCH_LENGTH/2 - 1 and impact_point is None:
                    impact_point = (world_x, world_y, world_z)
                    cv2.circle(frame, (int(center_x), int(center_y)), 5, (255, 0, 0), -1)
                    cv2.putText(frame, "Batsman Impact", (int(center_x), int(center_y) - 10),
                                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 0, 0), 2)
                break  # Use only the first detection in each frame

        if not ball_detected and pitch_impact is not None and impact_point is None:
            impact_point = None  # Tracking lost after the pitch; impact_point stays unset

        out.write(frame)

    stump_prediction = predict_trajectory(ball_positions) if ball_positions else None
    if stump_prediction and last_frame is not None:
        pred_y, pred_z = stump_prediction
        cv2.putText(last_frame, f"Predicted Stump Hit: ({pred_y:.2f}, {pred_z:.2f})",
                    (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 0), 2)
        out.write(last_frame)

    decision, reason = lbw_decision(pitch_impact, impact_point, stump_prediction)

    cap.release()
    out.release()

    return decision, reason, output_path
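
# The Gradio interface below maps process_video's (decision, reason, output_path)
# return value onto the three outputs in order.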
# Gradio interface
iface = gr.Interface(
    fn=process_video,
    inputs=gr.Video(label="Upload Cricket Delivery Video"),
    outputs=[
        gr.Textbox(label="LBW Decision"),
        gr.Textbox(label="Reason"),
        gr.Video(label="Processed Video with Annotations")
    ],
    title="Gully Cricket DRS for LBW",
    description="Upload a video of a cricket delivery to analyze LBW decisions."
)

if __name__ == "__main__":
    iface.launch()
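
A quick way to sanity-check the trajectory and decision helpers without the Gradio UI (a minimal sketch; the synthetic positions are illustrative, and importing app.py assumes best.pt is present so the model load succeeds):

from app import predict_trajectory, lbw_decision

# Hypothetical straight, in-line track from the bowler's end toward the stumps
positions = [(-10.0 + 1.5 * i, 0.5, 0.005 * i) for i in range(8)]
prediction = predict_trajectory(positions)  # (predicted height, lateral offset) at the stump plane
decision, reason = lbw_decision(positions[0], positions[-1], prediction)
print(decision, "-", reason)

For this in-line, stump-height track all three checks should pass and the snippet should print OUT.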