LovnishVerma committed on
Commit
addbe9f
·
verified ·
1 Parent(s): d97af89

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -5
app.py CHANGED
@@ -7,6 +7,7 @@ from PIL import Image
7
  import sqlite3
8
  from huggingface_hub import HfApi
9
  from datetime import datetime
 
10
 
11
  # Constants
12
  KNOWN_FACES_DIR = "known_faces" # Directory to save user images
@@ -137,14 +138,18 @@ if st.checkbox("Show registered students"):
137
 
138
  # Face and Emotion Detection Function
139
  def detect_faces_and_emotions(image):
140
- gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
 
 
 
141
  face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
142
- faces = face_cascade.detectMultiScale(gray_image, scaleFactor=1.3, minNeighbors=5)
143
-
 
144
  for (x, y, w, h) in faces:
145
- face = gray_image[y:y+h, x:x+w]
146
  resized_face = cv2.resize(face, (48, 48)) # Resize face to 48x48
147
- rgb_face = cv2.cvtColor(resized_face, cv2.COLOR_GRAY2RGB)
148
  normalized_face = rgb_face / 255.0
149
  reshaped_face = np.reshape(normalized_face, (1, 48, 48, 3))
150
 
@@ -152,6 +157,7 @@ def detect_faces_and_emotions(image):
152
  emotion_prediction = emotion_model.predict(reshaped_face)
153
  emotion_label = np.argmax(emotion_prediction)
154
  return EMOTION_LABELS[emotion_label]
 
155
  return None
156
 
157
  # UI for Emotion Detection (Only using webcam now)
@@ -163,6 +169,8 @@ if st.sidebar.selectbox("Menu", ["Register Student", "Face Recognition and Emoti
163
  if camera_image:
164
  img = Image.open(camera_image)
165
  img_array = np.array(img)
 
 
166
  emotion_label = detect_faces_and_emotions(img_array)
167
  if emotion_label:
168
  st.success(f"Emotion Detected: {emotion_label}")
 
7
  import sqlite3
8
  from huggingface_hub import HfApi
9
  from datetime import datetime
10
+ import face_recognition
11
 
12
  # Constants
13
  KNOWN_FACES_DIR = "known_faces" # Directory to save user images
 
138
 
139
  # Face and Emotion Detection Function
140
def detect_faces_and_emotions(image):
    """Detect the first face in *image* and classify its emotion.

    Parameters
    ----------
    image : np.ndarray
        Frame as produced by ``np.array(PIL.Image.open(...))`` — i.e. an
        RGB (H, W, 3) uint8 array from the Streamlit camera input.

    Returns
    -------
    str | None
        The label from ``EMOTION_LABELS`` for the first detected face, or
        ``None`` when no face is found.
    """
    # Haar cascades operate on single-channel images, so convert once up
    # front. The frame comes from PIL, hence RGB (not OpenCV's BGR) order.
    gray_image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)

    # Detect faces using OpenCV's bundled frontal-face Haar cascade.
    face_cascade = cv2.CascadeClassifier(
        cv2.data.haarcascades + 'haarcascade_frontalface_default.xml'
    )
    faces = face_cascade.detectMultiScale(gray_image, scaleFactor=1.3, minNeighbors=5)

    # Classify the first detected face only (the loop returns on its
    # first iteration), mirroring the original behavior.
    for (x, y, w, h) in faces:
        face = gray_image[y:y + h, x:x + w]
        resized_face = cv2.resize(face, (48, 48))  # model expects 48x48 input
        # Replicate the gray channel to 3 channels to match the model's
        # (1, 48, 48, 3) input shape. NOTE(review): assumes emotion_model
        # was trained on grayscale-replicated RGB — confirm against training.
        rgb_face = cv2.cvtColor(resized_face, cv2.COLOR_GRAY2RGB)
        normalized_face = rgb_face / 255.0  # scale uint8 [0,255] -> float [0,1]
        reshaped_face = np.reshape(normalized_face, (1, 48, 48, 3))

        emotion_prediction = emotion_model.predict(reshaped_face)
        emotion_label = int(np.argmax(emotion_prediction))
        return EMOTION_LABELS[emotion_label]

    # No face detected in the frame.
    return None
162
 
163
  # UI for Emotion Detection (Only using webcam now)
 
169
  if camera_image:
170
  img = Image.open(camera_image)
171
  img_array = np.array(img)
172
+
173
+ # Detect emotion in the captured image
174
  emotion_label = detect_faces_and_emotions(img_array)
175
  if emotion_label:
176
  st.success(f"Emotion Detected: {emotion_label}")