# ergonomics / app.py
import cv2
import math
import base64
import numpy as np
import mediapipe as mp
from io import BytesIO
from fastapi import FastAPI, File, UploadFile
from fastapi.responses import Response
from fastapi.middleware.cors import CORSMiddleware # Add CORS support
from PIL import Image
# Initialize FastAPI app
app = FastAPI()
# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Initialize Mediapipe Pose model
mp_pose = mp.solutions.pose
pose = mp_pose.Pose(
    static_image_mode=False,        # treat uploads as a continuous stream of video frames
    min_detection_confidence=0.5,
    min_tracking_confidence=0.5
)
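# Note (an assumption, not verified here): this single Pose instance is shared by every
# request, and with static_image_mode=False it keeps tracking state between calls. Results
# are therefore most meaningful when clients send consecutive frames from one camera; for
# concurrent or unrelated uploads, a per-request instance may be the safer choice.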
# Function to calculate the angle (in degrees) between segments a->b and b->c
def calculate_angle(a, b, c):
    ab = (b[0] - a[0], b[1] - a[1])
    bc = (c[0] - b[0], c[1] - b[1])
    dot_product = ab[0] * bc[0] + ab[1] * bc[1]
    magnitude_ab = math.sqrt(ab[0]**2 + ab[1]**2)
    magnitude_bc = math.sqrt(bc[0]**2 + bc[1]**2)
    # Avoid division by zero when either segment has zero length
    if magnitude_ab * magnitude_bc == 0:
        return 0.0
    # Clamp the cosine value to the [-1, 1] range to avoid numerical errors in acos
    cosine_angle = max(min(dot_product / (magnitude_ab * magnitude_bc), 1), -1)
    angle_radians = math.acos(cosine_angle)
    angle_degrees = math.degrees(angle_radians)
    return angle_degrees
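# Quick sanity checks for calculate_angle (illustrative only, not executed by the app):
#   calculate_angle((0, 2), (0, 1), (0, 0))  # ~0.0   -> both segments point the same way
#   calculate_angle((0, 0), (1, 0), (1, 1))  # ~90.0  -> right-angle turn at b
#   calculate_angle((0, 0), (0, 1), (0, 0))  # ~180.0 -> second segment reverses direction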
# Function to calculate a simplified REBA score based on trunk (hip) and neck angles.
def calculate_reba(trunk_angle, neck_angle):
    """
    This is a simplified approach:
    - For the trunk (approximated by the hip angle), a nearly upright posture (angle >= 160°) is scored as 1,
      a moderately bent posture (angle between 140° and 160°) is scored as 2, and a severely bent posture (< 140°) is scored as 3.
    - The neck angle is scored the same way, with thresholds of 150° and 130°.
    - The REBA score is the sum of these two scores.
    - Finally, a risk level is assigned based on the total score.
    """
    # Determine trunk score (using the hip angle)
    if trunk_angle >= 160:
        trunk_score = 1
    elif trunk_angle >= 140:
        trunk_score = 2
    else:
        trunk_score = 3
    # Determine neck score
    if neck_angle >= 150:
        neck_score = 1
    elif neck_angle >= 130:
        neck_score = 2
    else:
        neck_score = 3
    # Simplified REBA group A score (a full REBA assessment also considers legs, arms, load, etc.)
    reba_score = trunk_score + neck_score
    # Define risk levels based on the total score
    if reba_score <= 2:
        risk = "Negligible"
    elif reba_score <= 4:
        risk = "Low"
    elif reba_score <= 6:
        risk = "Medium"
    else:
        risk = "High"
    return reba_score, risk
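# Example scores (illustrative only): an upright posture such as trunk 175° / neck 165°
# gives 1 + 1 = 2 ("Negligible"), while a slouched trunk 135° / neck 125° gives
# 3 + 3 = 6 ("Medium"). Because only the trunk and neck contribute here, the maximum
# total is 6, so the "High" band is effectively reserved for a fuller REBA
# implementation that also scores arms, legs and load.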
# Process a frame with Mediapipe Pose estimation and analyze posture using the simplified REBA score
def process_frame(image):
    h, w, _ = image.shape
    # Convert to RGB for Mediapipe
    image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    image_rgb.flags.writeable = False
    results = pose.process(image_rgb)
    image_rgb.flags.writeable = True
    # Convert back to BGR for OpenCV drawing/display
    image = cv2.cvtColor(image_rgb, cv2.COLOR_RGB2BGR)
    if results.pose_landmarks:
        # Get key landmarks from the right side
        right_shoulder = results.pose_landmarks.landmark[mp_pose.PoseLandmark.RIGHT_SHOULDER]
        right_hip = results.pose_landmarks.landmark[mp_pose.PoseLandmark.RIGHT_HIP]
        right_ear = results.pose_landmarks.landmark[mp_pose.PoseLandmark.RIGHT_EAR]
        # Convert normalized coordinates to pixel coordinates
        cx_rs, cy_rs = int(right_shoulder.x * w), int(right_shoulder.y * h)
        cx_rh, cy_rh = int(right_hip.x * w), int(right_hip.y * h)
        cx_re, cy_re = int(right_ear.x * w), int(right_ear.y * h)
        # Create vertical reference points by shifting each joint upward by a fixed pixel offset
        offset = 60
        upper_shoulder = (cx_rs, max(0, cy_rs - offset))
        upper_hip = (cx_rh, max(0, cy_rh - offset))
        # Draw reference landmarks on the image
        cv2.circle(image, upper_shoulder, 5, (0, 255, 0), -1)
        cv2.circle(image, upper_hip, 5, (0, 255, 0), -1)
        # Draw lines connecting key points
        cv2.line(image, (cx_rh, cy_rh), (cx_rs, cy_rs), (255, 0, 255), 2)   # Hip to shoulder
        cv2.line(image, (cx_rs, cy_rs), (cx_re, cy_re), (255, 255, 0), 2)   # Shoulder to ear
        cv2.line(image, (cx_rh, cy_rh), upper_hip, (0, 165, 255), 2)        # Hip to vertical reference
        cv2.line(image, (cx_rs, cy_rs), upper_shoulder, (0, 255, 255), 2)   # Shoulder to vertical reference
        # Calculate angles against the vertical references (vertex at the hip and the shoulder, respectively)
        angle_hip = calculate_angle(upper_hip, (cx_rh, cy_rh), (cx_rs, cy_rs))
        angle_neck = calculate_angle(upper_shoulder, (cx_rs, cy_rs), (cx_re, cy_re))
        # Compute the simplified REBA score and corresponding risk level
        reba_score, risk = calculate_reba(angle_hip, angle_neck)
        # Display the calculated angles on the image
        cv2.putText(image, f"Hip Angle: {angle_hip:.1f}", (10, 60),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.6, (255, 0, 255), 2)
        cv2.putText(image, f"Neck Angle: {angle_neck:.1f}", (10, 90),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.6, (255, 255, 0), 2)
        # Display the simplified REBA score and risk level on the image
        cv2.putText(image, f"REBA Score: {reba_score} ({risk})", (10, 120),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 255), 2)
    return image
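# Local smoke test for process_frame (a sketch; "sample.jpg" is a hypothetical file path):
#   frame = cv2.imread("sample.jpg")            # BGR image, as process_frame expects
#   annotated = process_frame(frame)
#   cv2.imwrite("sample_annotated.jpg", annotated)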
# API route to receive an image and return the processed image with REBA analysis
@app.post("/upload")
async def upload_image(file: UploadFile = File(...)):
    contents = await file.read()
    # Ensure a 3-channel RGB image regardless of the uploaded format (e.g. PNG with alpha, grayscale)
    image = Image.open(BytesIO(contents)).convert("RGB")
    image = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR)
    # Process the image (includes the REBA score analysis)
    processed_image = process_frame(image)
    # Encode the processed image as JPEG and return it
    _, buffer = cv2.imencode(".jpg", processed_image)
    return Response(content=buffer.tobytes(), media_type="image/jpeg")
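# Example client call (the host/port are assumptions; adjust to your deployment):
#   curl -X POST -F "file=@frame.jpg" http://localhost:7860/upload --output annotated.jpg

# Minimal local entry point (a sketch; requires the `uvicorn` package to be installed):
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)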