import cv2
import numpy as np
import os
import onnxruntime as ort
import streamlit as st
from PIL import Image

# Preprocess image to match model input requirements
def preprocess_image(image):
    image = np.array(image.convert("RGB"))  # Ensure 3 channels (handles RGBA/grayscale uploads)
    image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)  # Convert image to grayscale
    image_resized = cv2.resize(image, (48, 48))  # Resize image to 48x48
    image_input = np.expand_dims(image_resized, axis=0)  # Add channel dimension (grayscale)
    image_input = np.expand_dims(image_input, axis=0)  # Add batch dimension -> shape (1, 1, 48, 48)
    image_input = image_input.astype(np.float32) / 255.0  # Normalize pixel values to [0, 1]
    return image_input

# Check if smile is present in the facial landmarks
def check_for_smile(face_landmarks):
    """Crude smile heuristic based on mouth landmarks.

    Expects a dict with 'top_lip' and 'bottom_lip' point lists (the format
    returned by the face_recognition library). The distance between the first
    bottom-lip point and the last top-lip point roughly tracks mouth width.
    """
    mouth = face_landmarks['bottom_lip'] + face_landmarks['top_lip']
    mouth_distance = np.linalg.norm(np.array(mouth[0]) - np.array(mouth[-1]))
    return mouth_distance > 30  # Pixel threshold; depends on face size and may need tuning

# Map the predicted emotion index to a human-readable label
def display_emotion(emotion):
    """Map predicted emotion index to a label"""
    emotion_map = {
        0: "Anger",
        1: "Disgust",
        2: "Fear",
        3: "Happiness",
        4: "Sadness",
        5: "Surprise",
        6: "Neutral"
    }
    return emotion_map.get(emotion, "Unknown")

# Display emotion with smile detection
def display_emotion_with_smile(emotion, face_landmarks=None):
    if emotion == 6 and face_landmarks:  # 'Neutral' is typically 6 in the emotion_map
        if check_for_smile(face_landmarks):
            return "Happiness"  # Override neutral with happiness if a smile is detected
    return display_emotion(emotion)  # Otherwise return the normal emotion

# Predict emotion with smile detection
def predict_emotion_with_smile(image_input, face_landmarks=None):
    """Run inference and predict the emotion, considering smile detection"""
    emotion = predict_emotion(image_input)  # Normal emotion prediction
    emotion_label = display_emotion_with_smile(emotion, face_landmarks)
    return emotion_label

# Load the ONNX model; cached so the inference session is created only once
@st.cache_resource
def load_model():
    model_path = 'onnx_model.onnx'  # Make sure this is the correct path
    if not os.path.exists(model_path):
        raise FileNotFoundError(f"Model file {model_path} not found!")
    return ort.InferenceSession(model_path)

# Predict emotion using the ONNX model
def predict_emotion(image_input):
    emotion_model = load_model()
    input_name = emotion_model.get_inputs()[0].name
    output_name = emotion_model.get_outputs()[0].name
    prediction = emotion_model.run([output_name], {input_name: image_input})
    return np.argmax(prediction[0])

# Streamlit app code
st.title("Emotion Recognition App")

# Upload an image
uploaded_file = st.file_uploader("Upload an image", type=["jpg", "jpeg", "png"])

# If an image is uploaded
if uploaded_file is not None:
    # Open and display the uploaded image
    image = Image.open(uploaded_file)
    st.image(image, caption="Uploaded Image", use_column_width=True)

    # Preprocess the image
    image_input = preprocess_image(image)
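
    # Optional: extract facial landmarks so the smile override in
    # display_emotion_with_smile() can actually take effect. This is a sketch
    # under the assumption that landmarks come from the face_recognition
    # library, whose dicts use the 'top_lip'/'bottom_lip' keys that
    # check_for_smile() expects; face_recognition is not a declared dependency here.
    face_landmarks = None
    try:
        import face_recognition  # assumed optional dependency
        landmarks_list = face_recognition.face_landmarks(np.array(image.convert("RGB")))
        if landmarks_list:
            face_landmarks = landmarks_list[0]  # Use the first detected face
    except ImportError:
        pass  # No landmark library available; fall back to the plain prediction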

    # Predict the emotion (smile-aware if landmarks were found)
    emotion_label = predict_emotion_with_smile(image_input, face_landmarks)

    # Display the predicted emotion
    st.write(f"Detected Emotion: {emotion_label}")