import cv2
import numpy as np
import onnxruntime as ort
import streamlit as st
from PIL import Image

# Load the ONNX model
def load_model(model_path='onnx_model.onnx'):
    # Create an ONNX Runtime inference session from the exported model file
    model = ort.InferenceSession(model_path)
    return model
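# Note: as written, load_model() builds a new InferenceSession on every Streamlit rerun.
# If your Streamlit version provides st.cache_resource, decorating load_model with
# @st.cache_resource is a common way to create the session only once per process.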

# Preprocess the image
def preprocess_image(image):
    image = image.convert("RGB")  # Drop any alpha channel (e.g. RGBA PNGs) so the array is H x W x 3
    image = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR)  # Convert to BGR (OpenCV channel order)
    image_resized = cv2.resize(image, (224, 224))  # Resize to 224x224
    image_input = np.expand_dims(image_resized, axis=0)  # Add batch dimension
    image_input = image_input.transpose(0, 3, 1, 2)  # Change dimensions to (1, 3, 224, 224)
    image_input = image_input.astype(np.float32) / 255.0  # Normalize the image
    return image_input
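# Note: the preprocessing above assumes the model expects BGR input at 224x224, scaled to [0, 1]
# with no mean/std normalization. If the model was exported with RGB inputs or ImageNet-style
# mean/std normalization, adjust this function to match its training pipeline, otherwise
# predictions may be unreliable.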

# Map the raw output to emotions
def get_emotion_from_output(output):
    emotion_labels = ['Anger', 'Disgust', 'Fear', 'Happiness', 'Sadness', 'Surprise', 'Neutral']
    # Index of the highest score in the model output (i.e., the predicted emotion)
    emotion_index = int(np.argmax(output[0]))
    confidence = output[0][emotion_index]  # Score of the predicted class
    emotion = emotion_labels[emotion_index]  # Corresponding emotion label
    return emotion, confidence
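# Note: if the exported model returns raw logits rather than probabilities, the "confidence"
# above is not a probability. Applying a softmax to the output row first
# (e.g. np.exp(x) / np.exp(x).sum()) would make the reported value interpretable as one.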

# Predict emotion using the ONNX model
def predict_emotion_onnx(model, image_input):
    # Get the input name and output name for the ONNX model
    input_name = model.get_inputs()[0].name
    output_name = model.get_outputs()[0].name
    # Run the model
    prediction = model.run([output_name], {input_name: image_input})
    return prediction[0]

# Streamlit UI
st.title("Emotion Detection")

# Upload an image
uploaded_file = st.file_uploader("Upload an image", type=["jpg", "jpeg", "png"])

if uploaded_file is not None:
    # Open and display the uploaded image
    image = Image.open(uploaded_file)
    st.image(image, caption="Uploaded Image", use_column_width=True)
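    # Note: use_column_width is deprecated in recent Streamlit releases in favour of
    # use_container_width; switch if your installed version warns about it.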

    # Load model
    onnx_model = load_model()

    # Preprocess the image
    image_input = preprocess_image(image)

    # Get emotion prediction
    emotion_prediction = predict_emotion_onnx(onnx_model, image_input)

    # Get the emotion label and confidence
    emotion_label, confidence = get_emotion_from_output(emotion_prediction)

    # Display the predicted emotion and confidence
    st.write(f"Predicted Emotion: {emotion_label}")
    st.write(f"Confidence: {confidence:.2f}")