Update app.py
app.py
CHANGED
@@ -1,40 +1,40 @@
 import streamlit as st
+from fer import FER  # Import FER only for image processing
+import cv2
 from PIL import Image
 import numpy as np
-import requests
-from io import BytesIO
-import cv2
-from fer import FER  # Ensure this package is in requirements.txt
 
-# Title
-st.title("Emotion Recognition
-st.write("Upload a face image to detect emotions using AI!")
-
-#
-
-if
-    #
-    image = Image.open(
+# Application Title
+st.title("Emotion Recognition for Autism Support")
+
+# Upload Image Section
+uploaded_image = st.file_uploader("Upload an Image with a Face", type=["jpg", "jpeg", "png"])
+
+if uploaded_image:
+    # Load the image
+    image = Image.open(uploaded_image)
     st.image(image, caption="Uploaded Image", use_column_width=True)
-
-    #
-
-    # Process the image for emotion detection
-    st.write("Detecting emotions...")
-    detector = FER(mtcnn=True)  # Initialize the FER+ detector
-
-    try:
-        # Detect emotions
-        result = detector.top_emotion(img_array)
-        if result:
-            emotion, score = result
-            st.write(f"Detected Emotion: **{emotion.capitalize()}** with confidence {score * 100:.2f}%")
-        else:
-            st.write("No clear emotion detected. Please try another image.")
-    except Exception as e:
-        st.error(f"Error processing the image: {e}")
-
-# Footer
-st.write("Emotion recognition powered by FER+")
+
+    # Convert PIL Image to NumPy Array
+    img_np = np.array(image)
+
+    # Convert RGB to BGR (required by OpenCV)
+    img_bgr = cv2.cvtColor(img_np, cv2.COLOR_RGB2BGR)
+
+    # Initialize FER detector
+    detector = FER(mtcnn=False)  # Disable MTCNN if not needed
+
+    # Detect emotions
+    st.write("Analyzing emotions...")
+    result = detector.detect_emotions(img_bgr)
+
+    if result:
+        for face in result:
+            emotions = face['emotions']
+            st.write("Detected Emotions:")
+            st.json(emotions)
+    else:
+        st.warning("No faces detected in the image. Please try another image.")
 
+# Provide Friendly Feedback
+st.write("Emotion recognition will be refined in future updates!")
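For context on the change: the previous version reported only a single top emotion with a confidence percentage, while the updated script shows the full per-face emotion dictionary via st.json. If the single-label summary is still wanted, it can be derived from the detect_emotions() output. The sketch below is a minimal illustration, assuming the fer library's usual result format (a list of faces, each with a "box" and an "emotions" score dictionary); the helper name top_emotion_per_face and the example file path are illustrative, not part of the app.

import cv2
from fer import FER

def top_emotion_per_face(img_bgr):
    """Return (label, score) for each face detected in a BGR image."""
    detector = FER(mtcnn=False)
    results = []
    for face in detector.detect_emotions(img_bgr):
        emotions = face["emotions"]              # e.g. {"happy": 0.91, "sad": 0.02, ...}
        label = max(emotions, key=emotions.get)  # emotion with the highest score
        results.append((label, emotions[label]))
    return results

# Example usage (file path is illustrative):
# img = cv2.imread("face.jpg")
# for label, score in top_emotion_per_face(img):
#     print(f"{label.capitalize()}: {score * 100:.1f}%")

Note that for the Space to build, fer and an OpenCV distribution (for example opencv-python-headless) would presumably still need to be listed in requirements.txt, as the removed import comment suggested.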