Pratyush101 committed on
Commit
bba6cec
·
verified ·
1 Parent(s): df8ec21

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +59 -54
app.py CHANGED
@@ -187,42 +187,45 @@
187
  # """
188
  # )
189
 
 
 
 
190
  import cv2
191
- import mediapipe as mp
192
  import numpy as np
 
193
  import streamlit as st
194
  from streamlit_webrtc import webrtc_streamer
195
 
196
- # Initialize MediaPipe Pose
 
 
 
 
 
 
 
197
  mp_pose = mp.solutions.pose
198
  mp_drawing = mp.solutions.drawing_utils
199
 
200
# Return the angle (in degrees, 0-180) at vertex b formed by points a-b-c.
def calculate_angle(a, b, c):
    a, b, c = np.array(a), np.array(b), np.array(c)
    # Directions of the two segments meeting at b.
    dir_ab = np.arctan2(a[1] - b[1], a[0] - b[0])
    dir_cb = np.arctan2(c[1] - b[1], c[0] - b[0])
    deg = np.abs((dir_cb - dir_ab) * 180.0 / np.pi)
    # Fold reflex angles back into [0, 180].
    return 360 - deg if deg > 180.0 else deg
210
 
211
# Squat detection processor for streamlit-webrtc.
class VideoProcessor:
    """Annotates each incoming video frame with squat-form feedback."""

    def __init__(self):
        # One Pose instance per session; constructing it per frame would be expensive.
        self.pose = mp_pose.Pose(min_detection_confidence=0.5, min_tracking_confidence=0.5)

    def recv(self, frame):
        """Process one frame.

        Parameters: frame -- incoming video frame (exposes to_ndarray/from_ndarray).
        Returns: a new frame with the pose skeleton and a feedback string drawn on.
        """
        image = frame.to_ndarray(format="bgr24")
        rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        rgb.flags.writeable = False  # hint that lets MediaPipe avoid copying
        results = self.pose.process(rgb)

        # BUG FIX: the original wrapped this in `try/except Exception` and hit an
        # AttributeError whenever no person was detected — expected control flow
        # handled as an exception, while real errors were swallowed by print().
        if results.pose_landmarks:
            landmarks = results.pose_landmarks.landmark
            hip = [landmarks[mp_pose.PoseLandmark.LEFT_HIP.value].x,
                   landmarks[mp_pose.PoseLandmark.LEFT_HIP.value].y]
            knee = [landmarks[mp_pose.PoseLandmark.LEFT_KNEE.value].x,
                    landmarks[mp_pose.PoseLandmark.LEFT_KNEE.value].y]
            ankle = [landmarks[mp_pose.PoseLandmark.LEFT_ANKLE.value].x,
                     landmarks[mp_pose.PoseLandmark.LEFT_ANKLE.value].y]
            shoulder = [landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].x,
                        landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].y]

            # Vertical reference dropped from the hip; measures trunk lean.
            x_axis_hip = [hip[0], 0]
            angle_knee = calculate_angle(hip, knee, ankle)
            angle_hip = calculate_angle(shoulder, hip, x_axis_hip)

            # BUG FIX: the original required hip < 40 for "Good Squat!" but only
            # warned "Bend Backward!" when hip > 45, leaving hip angles in
            # (40, 45] (and knee angles exactly on the 80/110 bounds) with no
            # feedback at all. Ordered guards below cover every value.
            if angle_knee < 80:
                feedback = "Squat too deep!"
            elif angle_knee > 110:
                feedback = "Lower your hips!"
            elif angle_hip < 29:
                feedback = "Bend Forward!"
            elif angle_hip > 40:
                feedback = "Bend Backward!"
            else:
                feedback = "Good Squat!"

            cv2.putText(image, feedback, (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv2.LINE_AA)
            mp_drawing.draw_landmarks(image, results.pose_landmarks, mp_pose.POSE_CONNECTIONS)

        return frame.from_ndarray(image, format="bgr24")
260
-
261
# Streamlit WebRTC configuration: stream the webcam through VideoProcessor.
_MEDIA_CONSTRAINTS = {"video": True, "audio": False}
_RTC_CONFIG = {"iceServers": [{"urls": ["stun:stun.l.google.com:19302"]}]}

webrtc_streamer(
    key="squat_detector",
    video_processor_factory=VideoProcessor,
    media_stream_constraints=_MEDIA_CONSTRAINTS,
    rtc_configuration=_RTC_CONFIG,
)
268
 
269
 
@@ -292,8 +299,6 @@ webrtc_streamer(
292
 
293
 
294
 
295
-
296
-
297
 
298
 
299
 
 
187
  # """
188
  # )
189
 
190
+ )
191
+
192
+ import logging
193
  import cv2
 
194
  import numpy as np
195
+ import mediapipe as mp
196
  import streamlit as st
197
  from streamlit_webrtc import webrtc_streamer
198
 
199
# Module-level logger; handler/level configuration is left to the host app.
logger = logging.getLogger(__name__)

# Page chrome shown before the video widget.
st.title("AI Squat Detection using WebRTC")
st.info("Use your webcam for real-time squat detection.")

# MediaPipe pose-estimation solution and its landmark-drawing helpers.
mp_pose = mp.solutions.pose
mp_drawing = mp.solutions.drawing_utils
209
 
210
# Angle calculation function
def calculate_angle(a, b, c):
    """Return the angle in degrees (0-180) at vertex *b* of the path a-b-c."""
    pa, pb, pc = np.array(a), np.array(b), np.array(c)
    theta = (np.arctan2(pc[1] - pb[1], pc[0] - pb[0])
             - np.arctan2(pa[1] - pb[1], pa[0] - pb[0]))
    deg = np.abs(theta * 180.0 / np.pi)
    if deg > 180.0:
        deg = 360 - deg
    return deg
220
 
221
def _get_pose():
    """Return a cached MediaPipe Pose instance, creating it on first use.

    PERF FIX: the original built a fresh ``mp_pose.Pose(...)`` inside the
    per-frame path, reloading the model graph on every single frame. One
    long-lived instance is the intended usage and dominates frame latency.
    The instance is intentionally never closed; it lives for the process.
    """
    pose = getattr(_get_pose, "_pose", None)
    if pose is None:
        pose = mp_pose.Pose(min_detection_confidence=0.5, min_tracking_confidence=0.5)
        _get_pose._pose = pose
    return pose


def process_frame(frame):
    """Annotate one WebRTC video frame with squat-form feedback.

    Parameters
    ----------
    frame : object
        Incoming camera frame exposing ``to_ndarray(format="bgr24")``
        (an av.VideoFrame in practice — TODO confirm against caller).

    Returns
    -------
    numpy.ndarray
        BGR image with joint angles, squat feedback and the pose skeleton drawn on.
    """
    image = frame.to_ndarray(format="bgr24")
    image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    results = _get_pose().process(image_rgb)

    if results.pose_landmarks:
        landmarks = results.pose_landmarks.landmark

        def point(lm):
            # (x, y) of one landmark in normalized image coordinates.
            return [landmarks[lm.value].x, landmarks[lm.value].y]

        hip = point(mp_pose.PoseLandmark.LEFT_HIP)
        knee = point(mp_pose.PoseLandmark.LEFT_KNEE)
        ankle = point(mp_pose.PoseLandmark.LEFT_ANKLE)
        shoulder = point(mp_pose.PoseLandmark.LEFT_SHOULDER)
        foot = point(mp_pose.PoseLandmark.LEFT_FOOT_INDEX)

        # Calculate angles; the hip angle uses a vertical reference dropped
        # from the hip ([hip.x, 0]) to capture trunk lean.
        knee_angle = calculate_angle(hip, knee, ankle)
        hip_angle = calculate_angle(shoulder, hip, [hip[0], 0])
        ankle_angle = calculate_angle(foot, ankle, knee)

        # Display key angles
        cv2.putText(image, f"Knee: {int(knee_angle)}", (10, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
        cv2.putText(image, f"Hip: {int(hip_angle)}", (10, 100), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
        cv2.putText(image, f"Ankle: {int(ankle_angle)}", (10, 150), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)

        # Squat logic
        if 80 < knee_angle < 110 and 29 < hip_angle < 40:
            cv2.putText(image, "Squat Detected!", (300, 100), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 255, 0), 3)
        else:
            if hip_angle < 29:
                cv2.putText(image, "Lean Forward!", (300, 200), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 3)
            elif hip_angle > 45:
                cv2.putText(image, "Lean Backward!", (300, 200), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 3)
            if knee_angle < 80:
                cv2.putText(image, "Squat Too Deep!", (300, 250), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 3)
            elif knee_angle > 110:
                cv2.putText(image, "Lower Your Hips!", (300, 300), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 3)

        mp_drawing.draw_landmarks(image, results.pose_landmarks, mp_pose.POSE_CONNECTIONS,
                                  mp_drawing.DrawingSpec(color=(255, 175, 0), thickness=2, circle_radius=2),
                                  mp_drawing.DrawingSpec(color=(0, 255, 200), thickness=2, circle_radius=2))
    return image
267
+
268
# WebRTC streamer configuration
class _SquatVideoProcessor:
    """Adapter giving streamlit-webrtc the object interface it expects.

    BUG FIX: the original passed ``video_processor_factory=lambda: process_frame``.
    The factory must return an object exposing ``recv(frame)``; a bare function
    has no ``recv`` attribute, so streamlit-webrtc raised AttributeError on
    every frame and no processed video was ever shown.
    """

    def recv(self, frame):
        # process_frame returns a BGR ndarray; re-wrap it as a video frame.
        # from_ndarray is reachable through the incoming frame's class, so no
        # direct `av` import is needed (same technique as the earlier version).
        annotated = process_frame(frame)
        return frame.from_ndarray(annotated, format="bgr24")


webrtc_streamer(
    key="squat-detection",
    video_processor_factory=_SquatVideoProcessor,
    media_stream_constraints={"video": True, "audio": False},
    async_processing=True,
)
275
 
276
 
 
299
 
300
 
301
 
 
 
302
 
303
 
304