import gradio as gr
import cv2
import mediapipe as mp
import numpy as np
from PIL import Image
# Initialize the MediaPipe Pose solution
mp_pose = mp.solutions.pose
# static_image_mode=True because each request processes an independent image, not a video stream
pose = mp_pose.Pose(static_image_mode=True, min_detection_confidence=0.5, model_complexity=1)
mp_drawing = mp.solutions.drawing_utils
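# Notes on the Pose configuration above (per the MediaPipe documentation):
# - model_complexity can be 0, 1 or 2; higher values trade inference speed for landmark accuracy.
# - min_detection_confidence is the minimum confidence from the person-detection
#   stage for a detection to be considered successful.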
# Function to calculate the angle (in degrees) at point b formed by points a, b and c
def calculate_angle(a, b, c):
    a = np.array([a.x, a.y])  # First point
    b = np.array([b.x, b.y])  # Mid point (vertex of the angle)
    c = np.array([c.x, c.y])  # End point
    radians = np.arctan2(c[1] - b[1], c[0] - b[0]) - np.arctan2(a[1] - b[1], a[0] - b[0])
    angle = np.abs(radians * 180.0 / np.pi)
    if angle > 180.0:
        angle = 360 - angle  # Normalize to the range [0, 180]
    return angle
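# A quick illustrative check of calculate_angle (the SimpleNamespace points below
# are stand-ins for MediaPipe landmarks, which expose .x and .y the same way):
#
#   from types import SimpleNamespace
#   a = SimpleNamespace(x=0.0, y=0.0)
#   b = SimpleNamespace(x=0.0, y=1.0)
#   c = SimpleNamespace(x=1.0, y=1.0)
#   calculate_angle(a, b, c)  # -> 90.0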
# Define a function to classify yoga poses from the detected landmarks
def classify_pose(landmarks, output_image):
    label = 'Unknown Pose'
    color = (0, 0, 255)
    # Calculate the required angles
    left_elbow_angle = calculate_angle(
        landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value],
        landmarks[mp_pose.PoseLandmark.LEFT_ELBOW.value],
        landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value])
    right_elbow_angle = calculate_angle(
        landmarks[mp_pose.PoseLandmark.RIGHT_SHOULDER.value],
        landmarks[mp_pose.PoseLandmark.RIGHT_ELBOW.value],
        landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value])
    left_shoulder_angle = calculate_angle(
        landmarks[mp_pose.PoseLandmark.LEFT_ELBOW.value],
        landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value],
        landmarks[mp_pose.PoseLandmark.LEFT_HIP.value])
    right_shoulder_angle = calculate_angle(
        landmarks[mp_pose.PoseLandmark.RIGHT_HIP.value],
        landmarks[mp_pose.PoseLandmark.RIGHT_SHOULDER.value],
        landmarks[mp_pose.PoseLandmark.RIGHT_ELBOW.value])
    left_knee_angle = calculate_angle(
        landmarks[mp_pose.PoseLandmark.LEFT_HIP.value],
        landmarks[mp_pose.PoseLandmark.LEFT_KNEE.value],
        landmarks[mp_pose.PoseLandmark.LEFT_ANKLE.value])
    right_knee_angle = calculate_angle(
        landmarks[mp_pose.PoseLandmark.RIGHT_HIP.value],
        landmarks[mp_pose.PoseLandmark.RIGHT_KNEE.value],
        landmarks[mp_pose.PoseLandmark.RIGHT_ANKLE.value])
    # Check for Five-Pointed Star Pose
    if abs(landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].y - landmarks[mp_pose.PoseLandmark.LEFT_HIP.value].y) < 0.1 and \
            abs(landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].y - landmarks[mp_pose.PoseLandmark.RIGHT_HIP.value].y) < 0.1 and \
            abs(landmarks[mp_pose.PoseLandmark.LEFT_ANKLE.value].x - landmarks[mp_pose.PoseLandmark.RIGHT_ANKLE.value].x) > 0.2 and \
            abs(landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].x - landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].x) > 0.2:
        label = "Five-Pointed Star Pose"
    # Check for Warrior II Pose
    if 165 < left_elbow_angle < 195 and 165 < right_elbow_angle < 195 and \
            80 < left_shoulder_angle < 110 and 80 < right_shoulder_angle < 110:
        if (165 < left_knee_angle < 195 or 165 < right_knee_angle < 195) and \
                (90 < left_knee_angle < 120 or 90 < right_knee_angle < 120):
            label = 'Warrior II Pose'
    # Check for T Pose
    if 165 < left_elbow_angle < 195 and 165 < right_elbow_angle < 195 and \
            80 < left_shoulder_angle < 110 and 80 < right_shoulder_angle < 110 and \
            160 < left_knee_angle < 195 and 160 < right_knee_angle < 195:
        label = 'T Pose'
    # Check for Tree Pose: one leg straight, the other knee sharply bent
    # (calculate_angle returns values in [0, 180], so the bent knee reads as roughly 25-45 degrees)
    if (165 < left_knee_angle < 195 or 165 < right_knee_angle < 195) and \
            (25 < left_knee_angle < 45 or 25 < right_knee_angle < 45):
        label = 'Tree Pose'
    # Check for Upward Salute Pose
    if abs(landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].x - landmarks[mp_pose.PoseLandmark.LEFT_HIP.value].x) < 0.1 and \
            abs(landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].x - landmarks[mp_pose.PoseLandmark.RIGHT_HIP.value].x) < 0.1 and \
            landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].y < landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].y and \
            landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].y < landmarks[mp_pose.PoseLandmark.RIGHT_SHOULDER.value].y and \
            abs(landmarks[mp_pose.PoseLandmark.LEFT_SHOULDER.value].y - landmarks[mp_pose.PoseLandmark.RIGHT_SHOULDER.value].y) < 0.05:
        label = "Upward Salute Pose"
    # Check for Hands Under Feet Pose
    if landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].y > landmarks[mp_pose.PoseLandmark.LEFT_KNEE.value].y and \
            landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].y > landmarks[mp_pose.PoseLandmark.RIGHT_KNEE.value].y and \
            abs(landmarks[mp_pose.PoseLandmark.LEFT_WRIST.value].x - landmarks[mp_pose.PoseLandmark.LEFT_ANKLE.value].x) < 0.05 and \
            abs(landmarks[mp_pose.PoseLandmark.RIGHT_WRIST.value].x - landmarks[mp_pose.PoseLandmark.RIGHT_ANKLE.value].x) < 0.05:
        label = "Hands Under Feet Pose"
    # Check for Plank Pose
    # The body should form a straight line from head to heels,
    # so the shoulder and knee angles should be close to 180 degrees
    if 160 < left_shoulder_angle < 200 and 160 < right_shoulder_angle < 200 and \
            160 < left_knee_angle < 200 and 160 < right_knee_angle < 200:
        label = "Plank Pose"
    # Update the color to green if a pose was classified
    if label != 'Unknown Pose':
        color = (0, 255, 0)
    # Write the label on the output image
    cv2.putText(output_image, label, (10, 30), cv2.FONT_HERSHEY_PLAIN, 2, color, 2)
    return output_image, label
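# Rough summary of the angle heuristics above (degrees, using the 0-180 values
# returned by calculate_angle):
#   Warrior II : elbows ~180, shoulders ~90, one knee ~180, the other ~90-120
#   T Pose     : elbows ~180, shoulders ~90, both knees ~180
#   Tree Pose  : one knee ~180, the other ~25-45
#   Plank Pose : shoulders and knees ~160-180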
def detect_and_classify_pose(input_image):
    # Convert PIL input to a NumPy array if necessary
    if isinstance(input_image, Image.Image):
        input_image = np.array(input_image)
    # MediaPipe expects RGB input, so run detection before any colour conversion
    results = pose.process(input_image)
    # Convert the image from RGB to BGR for OpenCV drawing
    output_image = cv2.cvtColor(input_image, cv2.COLOR_RGB2BGR)
    pose_classification = "No pose detected"
    if results.pose_landmarks:
        mp_drawing.draw_landmarks(output_image, results.pose_landmarks, mp_pose.POSE_CONNECTIONS)
        output_image, pose_classification = classify_pose(results.pose_landmarks.landmark, output_image)
    return cv2.cvtColor(output_image, cv2.COLOR_BGR2RGB), pose_classification
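# Quick local sanity check (illustrative only; "sample.jpg" is a hypothetical path):
#
#   annotated, label = detect_and_classify_pose(Image.open("sample.jpg"))
#   print(label)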
iface = gr.Interface(
    fn=detect_and_classify_pose,
    inputs=gr.Image(),
    outputs=["image", "text"],
    title="Yoga Pose Detection and Classification",
    description="This app detects and classifies yoga poses in an input image using MediaPipe Pose.",
)
iface.launch()
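# Note: the handler processes one frame at a time, so a gr.Image input is used here.
# True live-video classification would require a streaming webcam input and
# per-frame processing, which is outside the scope of this app.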