Update app.py
app.py CHANGED
@@ -187,42 +187,45 @@
 # """
 # )
 
+)
+
+import logging
 import cv2
-import mediapipe as mp
 import numpy as np
+import mediapipe as mp
 import streamlit as st
 from streamlit_webrtc import webrtc_streamer
 
-#
+# Logging setup
+logger = logging.getLogger(__name__)
+
+# Streamlit setup
+st.title("AI Squat Detection using WebRTC")
+st.info("Use your webcam for real-time squat detection.")
+
+# Initialize MediaPipe components
 mp_pose = mp.solutions.pose
 mp_drawing = mp.solutions.drawing_utils
 
-#
+# Angle calculation function
 def calculate_angle(a, b, c):
-    a = np.array(a)
-    b = np.array(b)
-    c = np.array(c)
+    a = np.array(a)
+    b = np.array(b)
+    c = np.array(c)
     radians = np.arctan2(c[1]-b[1], c[0]-b[0]) - np.arctan2(a[1]-b[1], a[0]-b[0])
     angle = np.abs(radians * 180.0 / np.pi)
     if angle > 180.0:
         angle = 360 - angle
     return angle
 
-
-
-
-
-
-
-
-
-        image.flags.writeable = False
-        results = self.pose.process(image)
-        image.flags.writeable = True
-        image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
-
-        try:
-            landmarks = results.pose_landmarks.landmark
+def process_frame(frame):
+    image = frame.to_ndarray(format="bgr24")
+    image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+    with mp_pose.Pose(min_detection_confidence=0.5, min_tracking_confidence=0.5) as pose:
+        results = pose.process(image_rgb)
+    landmarks = results.pose_landmarks.landmark if results.pose_landmarks else []
+
+    if landmarks:
         hip = [landmarks[mp_pose.PoseLandmark.LEFT_HIP.value].x,
                landmarks[mp_pose.PoseLandmark.LEFT_HIP.value].y]
         knee = [landmarks[mp_pose.PoseLandmark.LEFT_KNEE.value].x,
@@ -231,39 +234,43 @@ class VideoProcessor:
                landmarks[mp_pose.PoseLandmark.LEFT_ANKLE.value].y]
         shoulder = [landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].x,
                     landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].y]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        foot = [landmarks[mp_pose.PoseLandmark.LEFT_FOOT_INDEX.value].x,
+                landmarks[mp_pose.PoseLandmark.LEFT_FOOT_INDEX.value].y]
+
+        # Calculate angles
+        knee_angle = calculate_angle(hip, knee, ankle)
+        hip_angle = calculate_angle(shoulder, hip, [hip[0], 0])
+        ankle_angle = calculate_angle(foot, ankle, knee)
+
+        # Display key angles
+        cv2.putText(image, f"Knee: {int(knee_angle)}", (10, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
+        cv2.putText(image, f"Hip: {int(hip_angle)}", (10, 100), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
+        cv2.putText(image, f"Ankle: {int(ankle_angle)}", (10, 150), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
+
+        # Squat logic
+        if 80 < knee_angle < 110 and 29 < hip_angle < 40:
+            cv2.putText(image, "Squat Detected!", (300, 100), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 255, 0), 3)
+        else:
+            if hip_angle < 29:
+                cv2.putText(image, "Lean Forward!", (300, 200), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 3)
+            elif hip_angle > 45:
+                cv2.putText(image, "Lean Backward!", (300, 200), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 3)
+            if knee_angle < 80:
+                cv2.putText(image, "Squat Too Deep!", (300, 250), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 3)
+            elif knee_angle > 110:
+                cv2.putText(image, "Lower Your Hips!", (300, 300), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 3)
+
+        mp_drawing.draw_landmarks(image, results.pose_landmarks, mp_pose.POSE_CONNECTIONS,
+                                  mp_drawing.DrawingSpec(color=(255, 175, 0), thickness=2, circle_radius=2),
+                                  mp_drawing.DrawingSpec(color=(0, 255, 200), thickness=2, circle_radius=2))
+    return image
+
+# WebRTC streamer configuration
 webrtc_streamer(
-    key="
-    video_processor_factory=
+    key="squat-detection",
+    video_processor_factory=lambda: process_frame,
     media_stream_constraints={"video": True, "audio": False},
-
+    async_processing=True
 )
 
 
@@ -292,8 +299,6 @@ webrtc_streamer(
 
 
 
-
-
 
 
 
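As a quick sanity check of the calculate_angle function introduced above (illustrative only, not part of the commit): with the hip directly above the knee and the ankle directly to its right, the three points form a right angle at the knee, so the function should return 90.

# Illustrative check, run in the same app.py context where calculate_angle is defined.
# Hip at (0, 1), knee at (0, 0), ankle at (1, 0) form a right angle at the knee.
print(calculate_angle([0, 1], [0, 0], [1, 0]))  # -> 90.0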
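One thing a follow-up commit might address: process_frame opens a new mp_pose.Pose context on every frame, which re-initializes the model each time and discards the tracking state that min_tracking_confidence relies on. A sketch of reusing a single instance (a hypothetical refactor, not what this commit does):

# Hypothetical refactor: create the Pose model once at module level and reuse it,
# so the per-frame cost is inference only and landmark tracking persists across frames.
pose = mp_pose.Pose(min_detection_confidence=0.5, min_tracking_confidence=0.5)

def process_frame(frame):
    image = frame.to_ndarray(format="bgr24")
    results = pose.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
    # ... landmark extraction, angle checks, and drawing as in the diff ...
    return image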
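Note also that streamlit_webrtc expects video_processor_factory to return an object with a recv(frame) method that returns an av.VideoFrame, so video_processor_factory=lambda: process_frame hands it a bare function that will never be called per frame as written. A minimal adapter sketch, assuming process_frame returns the annotated BGR ndarray as in the diff (the SquatProcessor name is illustrative):

import av

class SquatProcessor:
    # streamlit_webrtc calls recv() once per incoming video frame.
    def recv(self, frame):
        annotated = process_frame(frame)  # BGR ndarray with overlays drawn
        return av.VideoFrame.from_ndarray(annotated, format="bgr24")

webrtc_streamer(
    key="squat-detection",
    video_processor_factory=SquatProcessor,
    media_stream_constraints={"video": True, "audio": False},
    async_processing=True,
)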