dschandra committed
Commit 7fd1105 · verified · 1 Parent(s): 49a3a3a

Create local_process.py

Files changed (1): local_process.py (+116, −0)
local_process.py ADDED
@@ -0,0 +1,116 @@
+ import cv2
+ import numpy as np
+ import requests
+ from scipy.interpolate import splprep, splev
+
+ # Camera setup (replace with your camera indices or IP streams)
+ caps = [cv2.VideoCapture(0)]  # Add more cameras as needed
+
+ def smooth_trajectory(points):
+     # splprep's default cubic spline needs at least 4 points
+     if len(points) < 4:
+         return points
+     x = [p["x"] for p in points]
+     y = [p["y"] for p in points]
+     tck, u = splprep([x, y], s=0)
+     u_new = np.linspace(0, 1, 50)
+     x_new, y_new = splev(u_new, tck)
+     # Cast to plain floats so the result stays JSON-serializable
+     return [{"x": float(px), "y": float(py)} for px, py in zip(x_new, y_new)]
+
+ def process_frame(frame):
+     # Isolate the ball by colour and return the centre of the largest blob
+     hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
+     mask = cv2.inRange(hsv, (0, 120, 70), (10, 255, 255))  # Adjust for your ball colour
+     contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
+     if contours:
+         c = max(contours, key=cv2.contourArea)
+         x, y, w, h = cv2.boundingRect(c)
+         return x + w / 2, y + h / 2
+     return None, None
+
+ actual_path = []
+ y_positions = []
+ pitching_detected = False
+ impact_detected = False
+ last_point = None
+ frame_count = 0
+ spin = 0  # Placeholder; no spin estimation yet
+
+ while True:
+     frames = []
+     for cap in caps:
+         ret, frame = cap.read()
+         if ret:
+             frames.append(frame)
+
+     if not frames:
+         break
+
+     # Process the first camera feed (add logic for multiple cameras)
+     frame = frames[0]
+     center_x, center_y = process_frame(frame)
+     if center_x is not None:
+         # Normalise to [0, 1] using the actual frame size
+         frame_h, frame_w = frame.shape[:2]
+         norm_x = center_x / frame_w
+         norm_y = center_y / frame_h
+         current_point = (norm_x, norm_y)
+
+         if last_point != current_point:
+             actual_path.append({"x": norm_x, "y": norm_y})
+             y_positions.append(norm_y)
+             last_point = current_point
+
+         # Pitching: the ball starts moving up the frame (y decreasing) after the bounce
+         if len(y_positions) > 2 and not pitching_detected:
+             if y_positions[-1] < y_positions[-2] < y_positions[-3]:
+                 pitching_detected = True
+                 pitching_x = actual_path[-2]["x"]
+                 pitching_y = actual_path[-2]["y"]
+
+         # Impact: a sharp drop in vertical speed suggests the ball hit pad or bat
+         if len(actual_path) > 2 and not impact_detected:
+             speed_current = abs(y_positions[-1] - y_positions[-2])
+             speed_prev = abs(y_positions[-2] - y_positions[-3])
+             if speed_current < speed_prev * 0.3:
+                 impact_detected = True
+                 impact_x = actual_path[-1]["x"]
+                 impact_y = actual_path[-1]["y"]
+
+     frame_count += 1
+     if impact_detected or frame_count > 50:
+         break
+
+     cv2.imshow('Frame', frame)
+     if cv2.waitKey(1) & 0xFF == ord('q'):
+         break
+
+ for cap in caps:
+     cap.release()
+ cv2.destroyAllWindows()
+
+ if not actual_path:
+     print("No ball detected")
+     exit()
+
+ # Fall back to sensible defaults if the events were never detected
+ if not pitching_detected:
+     pitching_x = actual_path[len(actual_path) // 2]["x"]
+     pitching_y = actual_path[len(actual_path) // 2]["y"]
+
+ if not impact_detected:
+     impact_x = actual_path[-1]["x"]
+     impact_y = actual_path[-1]["y"]
+
+ actual_path = smooth_trajectory(actual_path)
+ projected_path = [
+     {"x": impact_x, "y": impact_y},
+     {"x": impact_x + spin * 0.1, "y": 1.0}
+ ]
+
+ # Send data to the Hugging Face app
+ data = {
+     'actual_path': actual_path,
+     'projected_path': projected_path,
+     'pitching': {'x': pitching_x, 'y': pitching_y},
+     'impact': {'x': impact_x, 'y': impact_y},
+     'speed': frame_count / 30 * 0.5,  # Rough speed estimate (assumes ~30 fps)
+     'spin': spin
+ }
+
+ # Replace with your Hugging Face Space URL
+ response = requests.post('https://your-username-cricket-lbw-analyzer.hf.space/analyze_data', json=data)
+ print(response.json())
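
The script assumes the Space exposes an /analyze_data endpoint that accepts this JSON payload. A minimal sketch of what such a receiver could look like is below; this is hypothetical (it assumes a Flask app on the Space side) and simply echoes back a summary of what it received rather than implementing any LBW decision logic.

import os
from flask import Flask, request, jsonify

app = Flask(__name__)

@app.route("/analyze_data", methods=["POST"])
def analyze_data():
    # Hypothetical receiver: a real Space would run its LBW analysis here
    payload = request.get_json()
    return jsonify({
        "status": "received",
        "points": len(payload.get("actual_path", [])),
        "impact": payload.get("impact"),
    })

if __name__ == "__main__":
    # Spaces conventionally serve on port 7860
    app.run(host="0.0.0.0", port=int(os.environ.get("PORT", 7860)))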