Update app.py
app.py CHANGED
@@ -59,17 +59,20 @@ class VideoProcessor:
        self.press_stage = None
        self.squat_stage = None
        self.pose = mp_pose.Pose(min_detection_confidence=0.5, min_tracking_confidence=0.5)
-
-
+
    def process_video(self, video_file):
        # Get the filename from the file object
        filename = "temp_video.mp4"
        # Create a temporary file to write the contents of the uploaded video file
        with open(filename, 'wb') as temp_file:
            temp_file.write(video_file.read())
-
+
+        # Process the video and save the processed video to a new file
+        output_filename = "processed_video.mp4"
        cap = cv2.VideoCapture(filename)
-
+        frame_width = int(cap.get(3))
+        frame_height = int(cap.get(4))
+        out = cv2.VideoWriter(output_filename, cv2.VideoWriter_fourcc(*'h264'), 30, (frame_width, frame_height))
        while cap.isOpened():
            ret, frame = cap.read()
            if not ret:
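The writer configured above hard-codes 30 fps and the 'h264' FourCC, which only works when the local OpenCV build ships that encoder. A minimal sketch of a more defensive setup, reusing the filename and output_filename values from this diff (the 'mp4v' FourCC and the FPS fallback are suggestions, not part of the commit):

import cv2

filename, output_filename = "temp_video.mp4", "processed_video.mp4"
cap = cv2.VideoCapture(filename)
# Reuse the source frame rate when the container reports one; otherwise fall back to 30.
fps = cap.get(cv2.CAP_PROP_FPS) or 30
frame_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
frame_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
# 'mp4v' is available in stock OpenCV wheels far more often than 'h264'.
out = cv2.VideoWriter(output_filename, cv2.VideoWriter_fourcc(*'mp4v'), fps, (frame_width, frame_height))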
@@ -77,11 +80,15 @@ class VideoProcessor:
            frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            results = self.pose.process(frame_rgb)
            processed_frame = self.process_frame(frame, results)
-
+            out.write(processed_frame)
        cap.release()
+        out.release()
+
        # Remove the temporary file
        os.remove(filename)
-
+
+        # Return the path to the processed video file
+        return output_filename

    def process_frame(self, frame, results):
        # Process the frame using the `process` function
@@ -89,15 +96,7 @@ class VideoProcessor:
        return processed_frame

    def process(self, image):
-        """
-        Function to process the video frame and run the fitness trainer AI
-
-        Args:
-            image (numpy array): input image from the video
-
-        Returns:
-            numpy array: processed image with keypoint detection and fitness activity classification visualized
-        """
+
        # Pose detection model
        image.flags.writeable = False
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
@@ -118,20 +117,25 @@ class VideoProcessor:

        self.current_action = self.actions[np.argmax(res)]
        confidence = np.max(res)
+        print("confidence", confidence) # Debug print statement
+        print("current action" , self.current_action)

        # Erase current action variable if no probability is above threshold
        if confidence < self.threshold:
            self.current_action = ''
+
+
+        print("current action" , self.current_action)
+

        # Viz probabilities
        image = self.prob_viz(res, image)

        # Count reps
-
-
-
-
-        pass
+
+        landmarks = results.pose_landmarks.landmark
+        self.count_reps(image, landmarks, mp_pose)
+

        # Display graphical information
        cv2.rectangle(image, (0,0), (640, 40), self.colors[np.argmax(res)], -1)
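A caveat on the rep-counting hookup added above: with MediaPipe Pose, results.pose_landmarks is None on frames where no person is detected, so reading .landmark directly can raise AttributeError. A guarded variant of the two new lines, assuming the same names at the same spot inside process() (a sketch, not what the commit does):

        # Count reps only on frames where MediaPipe actually returned landmarks.
        if results.pose_landmarks is not None:
            landmarks = results.pose_landmarks.landmark
            self.count_reps(image, landmarks, mp_pose)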
@@ -157,90 +161,105 @@ class VideoProcessor:
    def count_reps(self, image, landmarks, mp_pose):
        """
        Counts repetitions of each exercise. Global count and stage (i.e., state) variables are updated within this function.
-
+
        """
-
+
        if self.current_action == 'curl':
            # Get coords
-            shoulder = self.get_coordinates(landmarks, mp_pose, '
-            elbow = self.get_coordinates(landmarks, mp_pose, '
-            wrist = self.get_coordinates(landmarks, mp_pose, '
-
+            shoulder = self.get_coordinates(landmarks, mp_pose, 'LEFT', 'SHOULDER')
+            elbow = self.get_coordinates(landmarks, mp_pose, 'LEFT', 'ELBOW')
+            wrist = self.get_coordinates(landmarks, mp_pose, 'LEFT', 'WRIST')
+
            # calculate elbow angle
            angle = self.calculate_angle(shoulder, elbow, wrist)
-
+
            # curl counter logic
+            print("Curl Angle:", angle) # Debug print statement
            if angle < 30:
-                self.curl_stage = "up"
-            if angle > 140 and self.curl_stage =='up':
-                self.curl_stage="down"
-                self.curl_counter +=1
+                self.curl_stage = "up"
+            if angle > 140 and self.curl_stage == 'up':
+                self.curl_stage = "down"
+                self.curl_counter += 1
+                print("count:",self.curl_counter)
                self.press_stage = None
                self.squat_stage = None
-
+
            # Viz joint angle
            self.viz_joint_angle(image, angle, elbow)
-
-        elif self.current_action == 'press':
+
+        elif self.current_action == 'press':
            # Get coords
-            shoulder = self.get_coordinates(landmarks, mp_pose, '
-            elbow = self.get_coordinates(landmarks, mp_pose, '
-            wrist = self.get_coordinates(landmarks, mp_pose, '
+            shoulder = self.get_coordinates(landmarks, mp_pose, 'LEFT', 'SHOULDER')
+            elbow = self.get_coordinates(landmarks, mp_pose, 'LEFT', 'ELBOW')
+            wrist = self.get_coordinates(landmarks, mp_pose, 'LEFT', 'WRIST')

            # Calculate elbow angle
            elbow_angle = self.calculate_angle(shoulder, elbow, wrist)
-
+            print(shoulder, elbow, wrist)
            # Compute distances between joints
-            shoulder2elbow_dist = abs(math.dist(shoulder,elbow))
-            shoulder2wrist_dist = abs(math.dist(shoulder,wrist))
-
+            shoulder2elbow_dist = abs(math.dist(shoulder, elbow))
+            shoulder2wrist_dist = abs(math.dist(shoulder, wrist))
+
            # Press counter logic
+            print("Press Angle:", elbow_angle) # Debug print statement
+            print("Shoulder to Elbow Distance:", shoulder2elbow_dist) # Debug print statement
+            print("Shoulder to Wrist Distance:", shoulder2wrist_dist) # Debug print statement
            if (elbow_angle > 130) and (shoulder2elbow_dist < shoulder2wrist_dist):
                self.press_stage = "up"
-            if (elbow_angle < 50) and (shoulder2elbow_dist > shoulder2wrist_dist) and (self.press_stage =='up'):
-                self.press_stage='down'
+            if (elbow_angle < 50) and (shoulder2elbow_dist > shoulder2wrist_dist) and (self.press_stage == 'up'):
+                self.press_stage = 'down'
                self.press_counter += 1
+
+
+                print("count:",self.press_counter)
                self.curl_stage = None
                self.squat_stage = None
-
+
            # Viz joint angle
            self.viz_joint_angle(image, elbow_angle, elbow)
-
+
        elif self.current_action == 'squat':
            # Get coords
            # left side
-            left_shoulder = self.get_coordinates(landmarks, mp_pose, '
-            left_hip = self.get_coordinates(landmarks, mp_pose, '
-            left_knee = self.get_coordinates(landmarks, mp_pose, '
-            left_ankle = self.get_coordinates(landmarks, mp_pose, '
+            left_shoulder = self.get_coordinates(landmarks, mp_pose, 'LEFT', 'SHOULDER')
+            left_hip = self.get_coordinates(landmarks, mp_pose, 'LEFT', 'HIP')
+            left_knee = self.get_coordinates(landmarks, mp_pose, 'LEFT', 'KNEE')
+            left_ankle = self.get_coordinates(landmarks, mp_pose, 'LEFT', 'ANKLE')
            # right side
-            right_shoulder = self.get_coordinates(landmarks, mp_pose, '
-            right_hip = self.get_coordinates(landmarks, mp_pose, '
-            right_knee = self.get_coordinates(landmarks, mp_pose, '
-            right_ankle = self.get_coordinates(landmarks, mp_pose, '
-
+            right_shoulder = self.get_coordinates(landmarks, mp_pose, 'RIGHT', 'SHOULDER')
+            right_hip = self.get_coordinates(landmarks, mp_pose, 'RIGHT', 'HIP')
+            right_knee = self.get_coordinates(landmarks, mp_pose, 'RIGHT', 'KNEE')
+            right_ankle = self.get_coordinates(landmarks, mp_pose, 'RIGHT', 'ANKLE')
+
            # Calculate knee angles
            left_knee_angle = self.calculate_angle(left_hip, left_knee, left_ankle)
            right_knee_angle = self.calculate_angle(right_hip, right_knee, right_ankle)
-
+
            # Calculate hip angles
            left_hip_angle = self.calculate_angle(left_shoulder, left_hip, left_knee)
            right_hip_angle = self.calculate_angle(right_shoulder, right_hip, right_knee)
-
+
            # Squat counter logic
            thr = 165
-
+            print("Left Knee Angle:", left_knee_angle) # Debug print statement
+            print("Right Knee Angle:", right_knee_angle) # Debug print statement
+            print("Left Hip Angle:", left_hip_angle) # Debug print statement
+            print("Right Hip Angle:", right_hip_angle) # Debug print statement
+            if (left_knee_angle < thr) and (right_knee_angle < thr) and (left_hip_angle < thr) and (
+                    right_hip_angle < thr):
                self.squat_stage = "down"
-            if (left_knee_angle > thr) and (right_knee_angle > thr) and (left_hip_angle > thr) and (
-
+            if (left_knee_angle > thr) and (right_knee_angle > thr) and (left_hip_angle > thr) and (
+                    right_hip_angle > thr) and (self.squat_stage == 'down'):
+                self.squat_stage = 'up'
                self.squat_counter += 1
+                print("count:",self.squat_counter)
                self.curl_stage = None
                self.press_stage = None
-
+
            # Viz joint angles
            self.viz_joint_angle(image, left_knee_angle, left_knee)
            self.viz_joint_angle(image, left_hip_angle, left_hip)
-
+
        else:
            pass
        return
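Each branch of count_reps above follows the same two-stage pattern: crossing one angle threshold arms the counter, and crossing back past the opposite threshold while armed counts a repetition. A stripped-down, standalone sketch of the curl case, using the 30/140 degree thresholds from the diff (the helper function and the sample angles are illustrative only):

def update_curl(angle, stage, count):
    # Elbow closed past 30 degrees: the arm is considered "up".
    if angle < 30:
        stage = "up"
    # Re-opening past 140 degrees from the "up" stage completes one repetition.
    if angle > 140 and stage == "up":
        stage = "down"
        count += 1
    return stage, count

stage, count = None, 0
for angle in [170, 25, 90, 150]:  # made-up elbow angles across a few frames
    stage, count = update_curl(angle, stage, count)
print(count)  # 1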
@@ -258,15 +277,48 @@ class VideoProcessor:

        return output_frame

+    def get_coordinates(self, landmarks, mp_pose, side, part):
+
+
+        coord = getattr(mp_pose.PoseLandmark,side.upper()+"_"+part.upper())
+        x_coord_val = landmarks[coord.value].x
+        y_coord_val = landmarks[coord.value].y
+        return [x_coord_val, y_coord_val]
+
+
+
+
+
+
+
+    def calculate_angle(self, a, b, c):
+        a = np.array(a)
+        b = np.array(b)
+        c = np.array(c)
+        radians = math.atan2(c[1]-b[1], c[0]-b[0]) - math.atan2(a[1]-b[1], a[0]-b[0])
+        angle = np.abs(radians*180.0/np.pi)
+        if angle > 180.0:
+            angle = 360 - angle
+        return angle
+
+    def viz_joint_angle(self, image, angle, joint):
+        cv2.putText(image, str(round(angle, 2)),
+                    tuple(np.multiply(joint, [640, 480]).astype(int)),
+                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 2, cv2.LINE_AA)
+
# Define Streamlit app
def main():
    st.title("Real-time Exercise Detection")
    video_file = st.file_uploader("Upload a video file", type=["mp4", "avi"])
    if video_file is not None:
        video_processor = VideoProcessor()
-
-
-
+
+        output_video = video_processor.process_video(video_file)
+
+
+        video_file = open(output_video, 'rb')
+        video_bytes = video_file.read()
+        st.video(video_bytes)

if __name__ == "__main__":
    main()
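The calculate_angle helper introduced in the last hunk returns the planar angle at the middle point b, in degrees, folded into the 0-180 range. A quick worked check of that formula with made-up normalized coordinates (shoulder, elbow, and wrist forming a right angle):

import math
import numpy as np

shoulder, elbow, wrist = [0.0, 0.0], [0.0, 1.0], [1.0, 1.0]  # toy values, not real landmarks
radians = math.atan2(wrist[1] - elbow[1], wrist[0] - elbow[0]) - math.atan2(shoulder[1] - elbow[1], shoulder[0] - elbow[0])
angle = np.abs(radians * 180.0 / np.pi)
if angle > 180.0:
    angle = 360 - angle
print(angle)  # 90.0: the elbow is bent at a right angle in this configuration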
|