Ahmadkhan12 commited on
Commit
c096457
·
verified ·
1 Parent(s): a5b08ed

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +57 -35
app.py CHANGED
@@ -1,38 +1,60 @@
1
  import streamlit as st
2
- from deepface import DeepFace
3
- from PIL import Image
4
  import numpy as np
 
 
5
 
6
- # Application Title
7
- st.title("Emotion Recognition for Autism Support")
8
-
9
- # Upload Image Section
10
- uploaded_image = st.file_uploader("Upload an Image with a Face", type=["jpg", "jpeg", "png"])
11
-
12
- if uploaded_image:
13
- # Display the uploaded image
14
- image = Image.open(uploaded_image)
15
- st.image(image, caption="Uploaded Image", use_column_width=True)
16
-
17
- # Convert PIL Image to NumPy array
18
- img_np = np.array(image)
19
-
20
- try:
21
- # Perform Emotion Analysis
22
- st.write("Analyzing emotions...")
23
- result = DeepFace.analyze(img_path=img_np, actions=["emotion"], enforce_detection=True)
24
-
25
- # Extract and Display Emotions
26
- if result and "emotion" in result:
27
- emotions = result["emotion"]
28
- st.write("Detected Emotions:")
29
- st.json(emotions)
30
-
31
- # Provide Friendly Feedback
32
- dominant_emotion = max(emotions, key=emotions.get)
33
- st.success(f"The dominant emotion is: {dominant_emotion}")
34
- else:
35
- st.warning("No emotions detected. Please try with another image.")
36
-
37
- except Exception as e:
38
- st.error(f"An error occurred: {e}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import streamlit as st
2
+ import face_recognition
 
3
  import numpy as np
4
+ import cv2
5
+ from PIL import Image
6
 
7
# Page chrome: configure the tab/layout, show the heading, and offer the
# image picker whose result drives the rest of the script.
st.set_page_config(layout="centered", page_title="Emotion Recognition App")

st.title("Emotion Recognition App")

# Accept a single image file from the user (JPEG or PNG only).
uploaded_file = st.file_uploader("Upload an image", type=["jpg", "jpeg", "png"])
15
# Define simple emotion mapping based on facial features (for demonstration purposes)
def detect_emotion(face_landmarks):
    """Classify a face's emotion from its landmark mapping.

    Placeholder heuristic only: any non-empty landmark collection is
    labelled "Happy", anything falsy (None/empty) "Neutral". Replace
    with a real classifier for production use.
    """
    return "Happy" if face_landmarks else "Neutral"
25
+
26
# Process the uploaded image: detect faces, annotate each with a mock
# emotion label, and render the result (or warn when no face is found).
if uploaded_file is not None:
    # Normalize to 3-channel 8-bit RGB: uploads may be RGBA or palette
    # PNGs, which face_recognition cannot consume directly.
    image = Image.open(uploaded_file).convert("RGB")
    image_np = np.array(image)

    # BUG FIX: PIL already yields RGB, so the previous
    # cv2.cvtColor(..., COLOR_BGR2RGB) silently swapped the channels to
    # BGR before detection. The array is used as-is now.
    rgb_image = image_np

    # Locate faces and their landmark sets in the image.
    face_locations = face_recognition.face_locations(rgb_image)
    face_landmarks_list = face_recognition.face_landmarks(rgb_image)

    if face_locations:
        for face_location, face_landmarks in zip(face_locations, face_landmarks_list):
            # face_recognition reports boxes as (top, right, bottom, left).
            top, right, bottom, left = face_location
            # Green bounding box around the detected face.
            cv2.rectangle(image_np, (left, top), (right, bottom), (0, 255, 0), 2)

            # Mock emotion classification from the landmark dict.
            emotion = detect_emotion(face_landmarks)

            # Label just above the box (blue text in RGB).
            cv2.putText(
                image_np,
                emotion,
                (left, top - 10),
                cv2.FONT_HERSHEY_SIMPLEX,
                0.9,
                (255, 0, 0),
                2,
            )

        st.image(image_np, caption="Processed Image", use_column_width=True)
    else:
        st.warning("No faces detected in the image.")