echung682 committed on
Commit
dd19745
·
verified ·
1 Parent(s): c2f3fbb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +91 -31
app.py CHANGED
@@ -55,17 +55,25 @@ def emotionAnalysis(message, face):
55
  text_dataDict["Emotion"].append(text_emotion)
56
  text_dataDict["Confidence Score"].append(round(text_score, 2))
57
 
58
- # Capture and process facial emotion
59
- img_rgb = cv2.cvtColor(face, cv2.COLOR_BGR2RGB)
60
-
61
- face_emotion, face_score = face_emotion_detector.top_emotion(img_rgb)
62
  face_timestamp = datetime.now().astimezone().strftime(localFormat)
63
-
64
- # Store facial emotion data for plotting
65
- face_dataDict["Time"].append(face_timestamp)
66
- face_dataDict["Emotion"].append(face_emotion)
67
- face_dataDict["Confidence Score"].append(face_score)
68
-
 
 
 
 
 
 
 
 
 
 
 
 
69
  # Return both the text result and the updated plot
70
  return f"Text: {text_emotion} | Face: {face_emotion}", displayResults()
71
 
@@ -96,27 +104,79 @@ def displayResults():
96
 
97
  return plt
98
 
99
- #with gr.Blocks as demo:
100
- #conesnt_radio = gr.Radio(["yes", "no"], label="This app uses your webcam to detect emotions from your face and reads your text inputs to determine emotions from your writing. Do you give consent? ")
101
-
102
- # Create Gradio interface with consent notice in the description
103
- interface = gr.Interface(
104
- fn=emotionAnalysis,
105
- inputs=[
106
- gr.Textbox(
107
- label="Enter your text",
108
- placeholder="Type your message here. Type 'quit' to see final results."
109
- ),
110
- gr.Image(label="Webcam Facial Expression", sources=['webcam'])
111
- ],
112
- outputs=[
113
- gr.Text(label="Emotion Results"),
114
- gr.Plot(label="Emotion Timeline")
115
- ],
116
- title="Emotion Analysis from Text and Face",
117
- description="⚠️ This application will use your webcam to detect facial emotions. By using this app, you consent to webcam access. Type text and press Enter to analyze both text and facial emotions."
118
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
119
 
120
  # Launch the interface
121
  if __name__ == "__main__":
122
- interface.launch()
 
55
  text_dataDict["Emotion"].append(text_emotion)
56
  text_dataDict["Confidence Score"].append(round(text_score, 2))
57
 
 
 
 
 
58
  face_timestamp = datetime.now().astimezone().strftime(localFormat)
59
+
60
+ if (face.any() == None):
61
+ face_emotion = "Unreadable"
62
+
63
+ face_dataDict["Time"].append(face_timestamp)
64
+ face_dataDict["Emotion"].append(face_emotion)
65
+ face_dataDict["Confidence Score"].append(0.0)
66
+ else:
67
+ # Capture and process facial emotion
68
+ img_rgb = cv2.cvtColor(face, cv2.COLOR_BGR2RGB)
69
+
70
+ face_emotion, face_score = face_emotion_detector.top_emotion(img_rgb)
71
+
72
+ # Store facial emotion data for plotting
73
+ face_dataDict["Time"].append(face_timestamp)
74
+ face_dataDict["Emotion"].append(face_emotion)
75
+ face_dataDict["Confidence Score"].append(face_score)
76
+
77
  # Return both the text result and the updated plot
78
  return f"Text: {text_emotion} | Face: {face_emotion}", displayResults()
79
 
 
104
 
105
  return plt
106
 
107
+
108
def process_webcam(img):
    """
    Pass a webcam frame through the streaming pipeline.

    Emotion detection on the live frame is currently disabled (the
    cv2/FER calls are commented out), so the frame is forwarded
    unchanged; the guard still protects the stream against a missing
    frame or an unexpected processing error.

    Args:
        img: Input image from the webcam, or None when no frame arrived.

    Returns:
        numpy.ndarray | None: The input frame unchanged, or None when
        no frame was supplied.
    """
    if img is None:
        return None

    try:
        # Detection hooks kept for when live per-frame analysis is re-enabled:
        # img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        # result = face_emotion_detector.detect_emotions(img_rgb)
        return img
    except Exception as e:
        # Never break the webcam stream — report and fall back to the raw frame.
        print(f"Error processing image: {str(e)}")
        return img
130
+
131
'''
Layout — 2 rows, 2 columns:
  column 1 (inputs):   row 1 = user text input, row 2 = camera live feed
  column 2 (outputs):  row 1 = emotion results, row 2 = plt graph
'''
with gr.Blocks(title="Emotion Reader") as emotion_reader:
    # Consent notice shown at the top of the app.
    gr.Markdown(
        """
        # Emotion Analysis from Text and Face
        ⚠️ This application will use your webcam to detect facial emotions. By using this app, you consent to webcam access. Type text and press Enter to analyze both text and facial emotions.
        """
    )

    # Row 1: text input (left) and textual emotion result (right).
    with gr.Row():
        with gr.Column():
            text_input = gr.Textbox(
                label="Enter your text",
                placeholder="Type your message here. Type 'quit' to see final results."
            )
        with gr.Column():
            emotion_result = gr.Textbox(label="Emotion Results")

    # Row 2: live webcam feed (left) and the emotion timeline plot (right).
    # output_img holds the latest streamed frame; it stays hidden and is
    # only read back when the user submits text.
    with gr.Row():
        with gr.Column():
            input_img = gr.Image(label="Webcam Feed", sources="webcam")
        with gr.Column():
            output_img = gr.Image(label="Emotion Detection", visible=False)
            plot_output = gr.Plot(value=displayResults(), label="Emotion Timeline")

    # Continuously stream webcam frames through process_webcam.
    input_img.stream(
        process_webcam,
        inputs=input_img,
        outputs=output_img,
        time_limit=15,
        stream_every=0.1,
        concurrency_limit=30
    )

    # On Enter: analyze the text plus the latest captured frame.
    text_input.submit(
        emotionAnalysis,
        inputs=[text_input, output_img],
        outputs=[emotion_result, plot_output]
    )
179
 
180
  # Launch the interface
181
  if __name__ == "__main__":
182
+ emotion_reader.launch()