import cv2
import numpy as np
from keras.models import model_from_json
from collections import Counter
import time

emotion_dict = {0: "Happy", 1: "Neutral/Sad", 2: "Sad"}
detected_emotions = []  # List to store detected emotions


# Function to reset the list of detected emotions
def reset_detected_emotions():
    global detected_emotions
    detected_emotions = []


# Function to process a single webcam frame and update the detected emotions
def process_frame(cap2, emotion_model):
    global detected_emotions
    ret, frame = cap2.read()
    if not ret:
        return  # Skip this frame if the webcam did not return an image
    frame = cv2.resize(frame, (1280, 720))
    face_detector = cv2.CascadeClassifier('emotion/haarcascades/haarcascade_frontalface_default.xml')
    gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    num_faces = face_detector.detectMultiScale(gray_frame, scaleFactor=1.3, minNeighbors=5)
    for (x, y, w, h) in num_faces:
        roi_gray_frame = gray_frame[y:y + h, x:x + w]
        # Resize the face ROI to the 48x48 grayscale input the model expects, shaped (1, 48, 48, 1)
        cropped_img = np.expand_dims(np.expand_dims(cv2.resize(roi_gray_frame, (48, 48)), -1), 0)
        emotion_prediction = emotion_model.predict(cropped_img)
        maxindex = int(np.argmax(emotion_prediction))
        detected_emotions.append(emotion_dict[maxindex])


# Function to get the most common emotion from the list
def get_most_common_emotion():
    global detected_emotions
    if detected_emotions:
        counter = Counter(detected_emotions)
        most_common_emotion = counter.most_common(1)[0][0]
        return most_common_emotion
    else:
        return None


def call_me():
    # Load the emotion model architecture and weights from disk
    json_file = open('emotion/model/emotion_model.json', 'r')
    loaded_model_json = json_file.read()
    json_file.close()
    emotion_model = model_from_json(loaded_model_json)
    emotion_model.load_weights("emotion/model/emotion_model.h5")
    print("Loaded model from disk")

    # Start the webcam feed
    cap2 = cv2.VideoCapture(0)
    duration = 5  # seconds
    end_time = time.time() + duration

    # Process webcam frames until the capture duration has elapsed
    while time.time() < end_time:
        process_frame(cap2, emotion_model)
    cap2.release()
    cv2.destroyAllWindows()

    # Get the most common emotion detected during the session
    most_common_emotion = get_most_common_emotion()
    return most_common_emotion
    # print("Most Common Emotion:", most_common_emotion)
    # print("User's current Emotion:", most_common_emotion)