aryan083 committed
Commit 4983aaa · 1 Parent(s): e80c1f8

please work

Files changed (9)
  1. Dockerfile +18 -0
  2. animate.zip +3 -0
  3. app.py +131 -0
  4. facedec.ipynb +296 -0
  5. model_attempt.ipynb +124 -0
  6. requirements.txt +6 -0
  7. static/index.html +663 -0
  8. static/script.js +225 -0
  9. static/style.css +349 -0
Dockerfile ADDED
@@ -0,0 +1,18 @@
+ # Base image with Python and necessary tools
+ FROM python:3.12
+
+ # Set the working directory in the container
+ WORKDIR /app
+
+ # Copy the application files to the container
+ COPY . /app
+
+
+ # Install Python dependencies
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ # Expose the port for the web server
+ EXPOSE 7860
+
+ # Command to run the application
+ CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:app"]
animate.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b85742cd6d924ac74dcc8be48215c75fba8536335e4946af53336ff689199917
+ size 1580600
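
This is a Git LFS pointer, not the archive itself; the real animate.zip is fetched with `git lfs pull`. A quick sketch for verifying a fetched copy against the pointer's oid and size, assuming the file sits in the working directory:

```python
import hashlib
import os

# Hash the fetched archive and compare with the pointer's sha256 oid and size.
with open("animate.zip", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert digest == "b85742cd6d924ac74dcc8be48215c75fba8536335e4946af53336ff689199917"
assert os.path.getsize("animate.zip") == 1580600
```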
app.py ADDED
@@ -0,0 +1,131 @@
+ from flask import Flask, request, jsonify
+ import base64
+ import io
+ from PIL import Image
+ from flask_cors import CORS
+ from transformers import pipeline
+
+ # Initialize Flask app
+ app = Flask(__name__)
+ CORS(app)
+
+ # Load the Hugging Face pipeline
+ pipe = pipeline("image-classification", model="trpakov/vit-face-expression")
+
+ # Emotion to emoji mapping
+ emotion_to_emoji = {
+     "angry": "😠",
+     "disgust": "🤮",
+     "fear": "😨",
+     "happy": "😊",
+     "sad": "😢",
+     "surprise": "😲",
+     "neutral": "😐"
+ }
+
+ def preprocess_image(image_data):
+     try:
+         # Decode base64 string to bytes
+         image_bytes = base64.b64decode(image_data)
+
+         # Open the image
+         img = Image.open(io.BytesIO(image_bytes))
+
+         # Convert to RGB if needed
+         if img.mode != 'RGB':
+             img = img.convert('RGB')
+
+         # Save grayscale version for response
+         img_gray = img.convert('L')
+         buffered = io.BytesIO()
+         img_gray.save(buffered, format="PNG")
+         grayscale_image_base64 = base64.b64encode(buffered.getvalue()).decode()
+
+         return {
+             "image": img,
+             "grayscale_base64": grayscale_image_base64
+         }
+     except Exception as e:
+         print("Error in preprocess_image:", str(e))
+         raise
+
+ @app.route('/', methods=['GET'])  # Fixed keyword 'methods'
+ def index():
+     return app.send_static_file('index.html')
+
+ @app.route('/upload', methods=['POST'])
+ def upload_image():
+     try:
+         data = request.get_json()
+         if not data or 'image' not in data:
+             return jsonify({'error': 'No image data provided'}), 400
+
+         # Preprocess image and get steps
+         preprocessing_results = preprocess_image(data['image'])
+         img = preprocessing_results["image"]
+
+         # Run inference
+         predictions = pipe(img)
+         top_prediction = predictions[0]
+
+         emotion = top_prediction['label'].lower()
+         emoji = emotion_to_emoji.get(emotion, "🤔")
+
+         # Get probabilities for all emotions
+         prob_dict = {pred['label'].lower(): float(pred['score']) for pred in predictions}
+
+         # Detailed process steps
+         process_steps = {
+             "image_acquisition": [
+                 "Webcam capture using getUserMedia API",
+                 "Canvas API used for image capture",
+                 "Base64 encoding for data transfer",
+                 "CORS-enabled secure transmission"
+             ],
+             "preprocessing": [
+                 "Base64 decoding to binary data",
+                 "PIL Image processing pipeline",
+                 "RGB format conversion",
+                 "Grayscale conversion for visualization",
+                 "Image resizing and normalization"
+             ],
+             "model_pipeline": [
+                 "Hugging Face Transformers pipeline",
+                 "ViT-based image classification",
+                 "Multi-head self-attention mechanism",
+                 "Feature extraction from image patches",
+                 "Emotion classification head"
+             ],
+             "classification": [
+                 "7-class emotion detection",
+                 "Softmax probability distribution",
+                 "Confidence score calculation",
+                 "Emoji mapping for visualization",
+                 "Real-time result generation"
+             ]
+         }
+
+         response_data = {
+             "emotion": emotion,
+             "emoji": emoji,
+             "grayscale_image": f"data:image/png;base64,{preprocessing_results['grayscale_base64']}",
+             "model_probabilities": prob_dict,
+             "processing_steps": {
+                 "original_size": img.size,
+                 "color_mode": img.mode,
+                 "detailed_steps": process_steps,
+                 "model_type": "Vision Transformer (ViT)",
+                 "input_shape": "224x224x3",
+                 "output_classes": "7 emotions (angry, disgust, fear, happy, sad, surprise, neutral)"
+             }
+         }
+
+         print("Response data:", response_data)  # Debug print
+         return jsonify(response_data)
+
+     except Exception as e:
+         print(f"Error processing image: {str(e)}")
+         return jsonify({'error': str(e)}), 500
+
+ if __name__ == '__main__':
+     app.run(debug=True)
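
For reference, a minimal client for the `/upload` endpoint might look like the sketch below. The file name `sample.png` is a placeholder, and the payload is a bare base64 string (no `data:image/png;base64,` prefix) because `upload_image` passes it straight to `base64.b64decode`. Port 7860 assumes the gunicorn command from the Dockerfile; a bare `python app.py` would serve on Flask's default port 5000 instead.

```python
import base64
import requests

# Encode a local test image the way the endpoint expects: a raw base64 string.
with open("sample.png", "rb") as f:
    image_b64 = base64.b64encode(f.read()).decode()

resp = requests.post("http://localhost:7860/upload", json={"image": image_b64})
resp.raise_for_status()

result = resp.json()
print(result["emotion"], result["emoji"])  # e.g. "happy 😊"
print(result["model_probabilities"])       # per-class scores from the pipeline
```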
facedec.ipynb ADDED
@@ -0,0 +1,296 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 2,
6
+ "metadata": {},
7
+ "outputs": [
8
+ {
9
+ "name": "stderr",
10
+ "output_type": "stream",
11
+ "text": [
12
+ "c:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\keras\\src\\layers\\convolutional\\base_conv.py:107: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead.\n",
13
+ " super().__init__(activity_regularizer=activity_regularizer, **kwargs)\n",
14
+ "WARNING:absl:Compiled the loaded model, but the compiled metrics have yet to be built. `model.compile_metrics` will be empty until you train or evaluate the model.\n",
15
+ "WARNING:absl:Error in loading the saved optimizer state. As a result, your model is starting with a freshly initialized optimizer.\n"
16
+ ]
17
+ },
18
+ {
19
+ "name": "stdout",
20
+ "output_type": "stream",
21
+ "text": [
22
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 313ms/step\n",
23
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
24
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
25
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 14ms/step\n",
26
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 22ms/step\n",
27
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
28
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
29
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
30
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
31
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
32
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 14ms/step\n",
33
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 15ms/step\n",
34
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
35
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 15ms/step\n",
36
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
37
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 15ms/step\n",
38
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
39
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
40
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 21ms/step\n",
41
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
42
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
43
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 15ms/step\n",
44
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
45
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
46
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 23ms/step\n",
47
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 22ms/step\n",
48
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
49
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 21ms/step\n",
50
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
51
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
52
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 22ms/step\n",
53
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 29ms/step\n",
54
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
55
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
56
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 13ms/step\n",
57
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 14ms/step\n",
58
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
59
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 24ms/step\n",
60
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
61
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
62
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 21ms/step\n",
63
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 21ms/step\n",
64
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 22ms/step\n",
65
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
66
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 24ms/step\n",
67
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 15ms/step\n",
68
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
69
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 14ms/step\n",
70
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
71
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 35ms/step\n",
72
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 22ms/step\n",
73
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 26ms/step\n",
74
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
75
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 14ms/step\n",
76
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 22ms/step\n",
77
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
78
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
79
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 21ms/step\n",
80
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 21ms/step\n",
81
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
82
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step\n",
83
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
84
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
85
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
86
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
87
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
88
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
89
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step\n",
90
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
91
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
92
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
93
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 21ms/step\n",
94
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
95
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 10ms/step\n",
96
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
97
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 24ms/step\n",
98
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
99
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 27ms/step\n",
100
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
101
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
102
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 15ms/step\n",
103
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
104
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 24ms/step\n",
105
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
106
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 21ms/step\n",
107
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
108
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
109
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
110
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
111
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
112
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 35ms/step\n",
113
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 13ms/step\n",
114
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 15ms/step\n",
115
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
116
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
117
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
118
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
119
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
120
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
121
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
122
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 13ms/step\n",
123
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
124
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
125
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 22ms/step\n",
126
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
127
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 21ms/step\n",
128
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 15ms/step\n",
129
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 13ms/step\n",
130
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
131
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
132
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
133
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
134
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
135
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
136
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
137
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 15ms/step\n",
138
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 26ms/step\n",
139
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 23ms/step\n",
140
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 30ms/step\n",
141
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 23ms/step\n",
142
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 22ms/step\n",
143
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 24ms/step\n",
144
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
145
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
146
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
147
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
148
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 43ms/step\n",
149
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
150
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
151
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 23ms/step\n",
152
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
153
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 22ms/step\n",
154
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
155
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 15ms/step\n",
156
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 54ms/step\n",
157
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
158
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
159
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 16ms/step\n",
160
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
161
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
162
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
163
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 21ms/step\n",
164
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
165
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 17ms/step\n",
166
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 27ms/step\n",
167
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
168
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
169
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 18ms/step\n",
170
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 20ms/step\n",
171
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 19ms/step\n",
172
+ "\u001b[1m1/1\u001b[0m \u001b[32m━━━━━━━━━━━━━━━━━━━━\u001b[0m\u001b[37m\u001b[0m \u001b[1m0s\u001b[0m 22ms/step\n"
173
+ ]
174
+ },
175
+ {
176
+ "ename": "KeyboardInterrupt",
177
+ "evalue": "",
178
+ "output_type": "error",
179
+ "traceback": [
180
+ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
181
+ "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
182
+ "Cell \u001b[1;32mIn[2], line 41\u001b[0m\n\u001b[0;32m 38\u001b[0m face_image \u001b[38;5;241m=\u001b[39m np\u001b[38;5;241m.\u001b[39mvstack([face_image])\n\u001b[0;32m 40\u001b[0m \u001b[38;5;66;03m# Predict emotion using the loaded model\u001b[39;00m\n\u001b[1;32m---> 41\u001b[0m predictions \u001b[38;5;241m=\u001b[39m \u001b[43mmodel_best\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpredict\u001b[49m\u001b[43m(\u001b[49m\u001b[43mface_image\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 42\u001b[0m emotion_label \u001b[38;5;241m=\u001b[39m class_names[np\u001b[38;5;241m.\u001b[39margmax(predictions)]\n\u001b[0;32m 44\u001b[0m \u001b[38;5;66;03m# Display the emotion label on the frame\u001b[39;00m\n",
183
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\keras\\src\\utils\\traceback_utils.py:117\u001b[0m, in \u001b[0;36mfilter_traceback.<locals>.error_handler\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 115\u001b[0m filtered_tb \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 116\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 117\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 118\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m 119\u001b[0m filtered_tb \u001b[38;5;241m=\u001b[39m _process_traceback_frames(e\u001b[38;5;241m.\u001b[39m__traceback__)\n",
184
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\keras\\src\\backend\\tensorflow\\trainer.py:448\u001b[0m, in \u001b[0;36mTensorFlowTrainer.predict\u001b[1;34m(self, x, batch_size, verbose, steps, callbacks)\u001b[0m\n\u001b[0;32m 443\u001b[0m \u001b[38;5;129m@traceback_utils\u001b[39m\u001b[38;5;241m.\u001b[39mfilter_traceback\n\u001b[0;32m 444\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mpredict\u001b[39m(\n\u001b[0;32m 445\u001b[0m \u001b[38;5;28mself\u001b[39m, x, batch_size\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m, verbose\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mauto\u001b[39m\u001b[38;5;124m\"\u001b[39m, steps\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m, callbacks\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 446\u001b[0m ):\n\u001b[0;32m 447\u001b[0m \u001b[38;5;66;03m# Create an iterator that yields batches of input data.\u001b[39;00m\n\u001b[1;32m--> 448\u001b[0m epoch_iterator \u001b[38;5;241m=\u001b[39m \u001b[43mTFEpochIterator\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 449\u001b[0m \u001b[43m \u001b[49m\u001b[43mx\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mx\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 450\u001b[0m \u001b[43m \u001b[49m\u001b[43mbatch_size\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbatch_size\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 451\u001b[0m \u001b[43m \u001b[49m\u001b[43msteps_per_epoch\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msteps\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 452\u001b[0m \u001b[43m \u001b[49m\u001b[43mshuffle\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 453\u001b[0m \u001b[43m \u001b[49m\u001b[43mdistribute_strategy\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdistribute_strategy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 454\u001b[0m \u001b[43m \u001b[49m\u001b[43msteps_per_execution\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msteps_per_execution\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 455\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 457\u001b[0m \u001b[38;5;66;03m# Container that configures and calls callbacks.\u001b[39;00m\n\u001b[0;32m 458\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(callbacks, callbacks_module\u001b[38;5;241m.\u001b[39mCallbackList):\n",
185
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\keras\\src\\backend\\tensorflow\\trainer.py:666\u001b[0m, in \u001b[0;36mTFEpochIterator.__init__\u001b[1;34m(self, distribute_strategy, *args, **kwargs)\u001b[0m\n\u001b[0;32m 664\u001b[0m \u001b[38;5;28msuper\u001b[39m()\u001b[38;5;241m.\u001b[39m\u001b[38;5;21m__init__\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 665\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_distribute_strategy \u001b[38;5;241m=\u001b[39m distribute_strategy\n\u001b[1;32m--> 666\u001b[0m dataset \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_get_iterator\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 667\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(dataset, tf\u001b[38;5;241m.\u001b[39mdistribute\u001b[38;5;241m.\u001b[39mDistributedDataset):\n\u001b[0;32m 668\u001b[0m dataset \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_distribute_strategy\u001b[38;5;241m.\u001b[39mexperimental_distribute_dataset(\n\u001b[0;32m 669\u001b[0m dataset\n\u001b[0;32m 670\u001b[0m )\n",
186
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\keras\\src\\backend\\tensorflow\\trainer.py:675\u001b[0m, in \u001b[0;36mTFEpochIterator._get_iterator\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 674\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_get_iterator\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[1;32m--> 675\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdata_adapter\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_tf_dataset\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n",
187
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\keras\\src\\trainers\\data_adapters\\array_data_adapter.py:232\u001b[0m, in \u001b[0;36mArrayDataAdapter.get_tf_dataset\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 229\u001b[0m dataset \u001b[38;5;241m=\u001b[39m dataset\u001b[38;5;241m.\u001b[39mwith_options(options)\n\u001b[0;32m 230\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m dataset\n\u001b[1;32m--> 232\u001b[0m indices_dataset \u001b[38;5;241m=\u001b[39m \u001b[43mindices_dataset\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mflat_map\u001b[49m\u001b[43m(\u001b[49m\u001b[43mslice_batch_indices\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 233\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m shuffle \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[0;32m 234\u001b[0m indices_dataset \u001b[38;5;241m=\u001b[39m indices_dataset\u001b[38;5;241m.\u001b[39mmap(tf\u001b[38;5;241m.\u001b[39mrandom\u001b[38;5;241m.\u001b[39mshuffle)\n",
188
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\data\\ops\\dataset_ops.py:2389\u001b[0m, in \u001b[0;36mDatasetV2.flat_map\u001b[1;34m(self, map_func, name)\u001b[0m\n\u001b[0;32m 2385\u001b[0m \u001b[38;5;66;03m# Loaded lazily due to a circular dependency (dataset_ops -> flat_map_op ->\u001b[39;00m\n\u001b[0;32m 2386\u001b[0m \u001b[38;5;66;03m# dataset_ops).\u001b[39;00m\n\u001b[0;32m 2387\u001b[0m \u001b[38;5;66;03m# pylint: disable=g-import-not-at-top,protected-access\u001b[39;00m\n\u001b[0;32m 2388\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtensorflow\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mpython\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mdata\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mops\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m flat_map_op\n\u001b[1;32m-> 2389\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mflat_map_op\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_flat_map\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmap_func\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m)\u001b[49m\n",
189
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\data\\ops\\flat_map_op.py:24\u001b[0m, in \u001b[0;36m_flat_map\u001b[1;34m(input_dataset, map_func, name)\u001b[0m\n\u001b[0;32m 22\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_flat_map\u001b[39m(input_dataset, map_func, name\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m): \u001b[38;5;66;03m# pylint: disable=unused-private-name\u001b[39;00m\n\u001b[0;32m 23\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"See `Dataset.flat_map()` for details.\"\"\"\u001b[39;00m\n\u001b[1;32m---> 24\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_FlatMapDataset\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_dataset\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmap_func\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[43m)\u001b[49m\n",
190
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\data\\ops\\flat_map_op.py:33\u001b[0m, in \u001b[0;36m_FlatMapDataset.__init__\u001b[1;34m(self, input_dataset, map_func, name)\u001b[0m\n\u001b[0;32m 30\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__init__\u001b[39m(\u001b[38;5;28mself\u001b[39m, input_dataset, map_func, name\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m):\n\u001b[0;32m 32\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_input_dataset \u001b[38;5;241m=\u001b[39m input_dataset\n\u001b[1;32m---> 33\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_map_func \u001b[38;5;241m=\u001b[39m \u001b[43mstructured_function\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mStructuredFunctionWrapper\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 34\u001b[0m \u001b[43m \u001b[49m\u001b[43mmap_func\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_transformation_name\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdataset\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minput_dataset\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 35\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_map_func\u001b[38;5;241m.\u001b[39moutput_structure, dataset_ops\u001b[38;5;241m.\u001b[39mDatasetSpec):\n\u001b[0;32m 36\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\n\u001b[0;32m 37\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mThe `map_func` argument must return a `Dataset` object. Got \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 38\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mdataset_ops\u001b[38;5;241m.\u001b[39mget_type(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_map_func\u001b[38;5;241m.\u001b[39moutput_structure)\u001b[38;5;132;01m!r}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
191
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\data\\ops\\structured_function.py:265\u001b[0m, in \u001b[0;36mStructuredFunctionWrapper.__init__\u001b[1;34m(self, func, transformation_name, dataset, input_classes, input_shapes, input_types, input_structure, add_to_graph, use_legacy_function, defun_kwargs)\u001b[0m\n\u001b[0;32m 258\u001b[0m warnings\u001b[38;5;241m.\u001b[39mwarn(\n\u001b[0;32m 259\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mEven though the `tf.config.experimental_run_functions_eagerly` \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 260\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124moption is set, this option does not apply to tf.data functions. \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 261\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mTo force eager execution of tf.data functions, please use \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 262\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m`tf.data.experimental.enable_debug_mode()`.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 263\u001b[0m fn_factory \u001b[38;5;241m=\u001b[39m trace_tf_function(defun_kwargs)\n\u001b[1;32m--> 265\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_function \u001b[38;5;241m=\u001b[39m \u001b[43mfn_factory\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 266\u001b[0m \u001b[38;5;66;03m# There is no graph to add in eager mode.\u001b[39;00m\n\u001b[0;32m 267\u001b[0m add_to_graph \u001b[38;5;241m&\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;129;01mnot\u001b[39;00m context\u001b[38;5;241m.\u001b[39mexecuting_eagerly()\n",
192
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\polymorphic_function.py:1251\u001b[0m, in \u001b[0;36mFunction.get_concrete_function\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1249\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_concrete_function\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[0;32m 1250\u001b[0m \u001b[38;5;66;03m# Implements PolymorphicFunction.get_concrete_function.\u001b[39;00m\n\u001b[1;32m-> 1251\u001b[0m concrete \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_get_concrete_function_garbage_collected\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1252\u001b[0m concrete\u001b[38;5;241m.\u001b[39m_garbage_collector\u001b[38;5;241m.\u001b[39mrelease() \u001b[38;5;66;03m# pylint: disable=protected-access\u001b[39;00m\n\u001b[0;32m 1253\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m concrete\n",
193
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\polymorphic_function.py:1221\u001b[0m, in \u001b[0;36mFunction._get_concrete_function_garbage_collected\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 1219\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_variable_creation_config \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 1220\u001b[0m initializers \u001b[38;5;241m=\u001b[39m []\n\u001b[1;32m-> 1221\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_initialize\u001b[49m\u001b[43m(\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43madd_initializers_to\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minitializers\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1222\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_initialize_uninitialized_variables(initializers)\n\u001b[0;32m 1224\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_created_variables:\n\u001b[0;32m 1225\u001b[0m \u001b[38;5;66;03m# In this case we have created variables on the first call, so we run the\u001b[39;00m\n\u001b[0;32m 1226\u001b[0m \u001b[38;5;66;03m# version which is guaranteed to never create variables.\u001b[39;00m\n",
194
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\polymorphic_function.py:696\u001b[0m, in \u001b[0;36mFunction._initialize\u001b[1;34m(self, args, kwds, add_initializers_to)\u001b[0m\n\u001b[0;32m 691\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_variable_creation_config \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_generate_scoped_tracing_options(\n\u001b[0;32m 692\u001b[0m variable_capturing_scope,\n\u001b[0;32m 693\u001b[0m tracing_compilation\u001b[38;5;241m.\u001b[39mScopeType\u001b[38;5;241m.\u001b[39mVARIABLE_CREATION,\n\u001b[0;32m 694\u001b[0m )\n\u001b[0;32m 695\u001b[0m \u001b[38;5;66;03m# Force the definition of the function for these arguments\u001b[39;00m\n\u001b[1;32m--> 696\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_concrete_variable_creation_fn \u001b[38;5;241m=\u001b[39m \u001b[43mtracing_compilation\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtrace_function\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 697\u001b[0m \u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwds\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_variable_creation_config\u001b[49m\n\u001b[0;32m 698\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 700\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21minvalid_creator_scope\u001b[39m(\u001b[38;5;241m*\u001b[39munused_args, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39munused_kwds):\n\u001b[0;32m 701\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Disables variable creation.\"\"\"\u001b[39;00m\n",
195
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\tracing_compilation.py:178\u001b[0m, in \u001b[0;36mtrace_function\u001b[1;34m(args, kwargs, tracing_options)\u001b[0m\n\u001b[0;32m 175\u001b[0m args \u001b[38;5;241m=\u001b[39m tracing_options\u001b[38;5;241m.\u001b[39minput_signature\n\u001b[0;32m 176\u001b[0m kwargs \u001b[38;5;241m=\u001b[39m {}\n\u001b[1;32m--> 178\u001b[0m concrete_function \u001b[38;5;241m=\u001b[39m \u001b[43m_maybe_define_function\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 179\u001b[0m \u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtracing_options\u001b[49m\n\u001b[0;32m 180\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 182\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m tracing_options\u001b[38;5;241m.\u001b[39mbind_graph_to_function:\n\u001b[0;32m 183\u001b[0m concrete_function\u001b[38;5;241m.\u001b[39m_garbage_collector\u001b[38;5;241m.\u001b[39mrelease() \u001b[38;5;66;03m# pylint: disable=protected-access\u001b[39;00m\n",
196
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\tracing_compilation.py:283\u001b[0m, in \u001b[0;36m_maybe_define_function\u001b[1;34m(args, kwargs, tracing_options)\u001b[0m\n\u001b[0;32m 281\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 282\u001b[0m target_func_type \u001b[38;5;241m=\u001b[39m lookup_func_type\n\u001b[1;32m--> 283\u001b[0m concrete_function \u001b[38;5;241m=\u001b[39m \u001b[43m_create_concrete_function\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 284\u001b[0m \u001b[43m \u001b[49m\u001b[43mtarget_func_type\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlookup_func_context\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfunc_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtracing_options\u001b[49m\n\u001b[0;32m 285\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 287\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m tracing_options\u001b[38;5;241m.\u001b[39mfunction_cache \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 288\u001b[0m tracing_options\u001b[38;5;241m.\u001b[39mfunction_cache\u001b[38;5;241m.\u001b[39madd(\n\u001b[0;32m 289\u001b[0m concrete_function, current_func_context\n\u001b[0;32m 290\u001b[0m )\n",
197
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\eager\\polymorphic_function\\tracing_compilation.py:310\u001b[0m, in \u001b[0;36m_create_concrete_function\u001b[1;34m(function_type, type_context, func_graph, tracing_options)\u001b[0m\n\u001b[0;32m 303\u001b[0m placeholder_bound_args \u001b[38;5;241m=\u001b[39m function_type\u001b[38;5;241m.\u001b[39mplaceholder_arguments(\n\u001b[0;32m 304\u001b[0m placeholder_context\n\u001b[0;32m 305\u001b[0m )\n\u001b[0;32m 307\u001b[0m disable_acd \u001b[38;5;241m=\u001b[39m tracing_options\u001b[38;5;241m.\u001b[39mattributes \u001b[38;5;129;01mand\u001b[39;00m tracing_options\u001b[38;5;241m.\u001b[39mattributes\u001b[38;5;241m.\u001b[39mget(\n\u001b[0;32m 308\u001b[0m attributes_lib\u001b[38;5;241m.\u001b[39mDISABLE_ACD, \u001b[38;5;28;01mFalse\u001b[39;00m\n\u001b[0;32m 309\u001b[0m )\n\u001b[1;32m--> 310\u001b[0m traced_func_graph \u001b[38;5;241m=\u001b[39m \u001b[43mfunc_graph_module\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfunc_graph_from_py_func\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 311\u001b[0m \u001b[43m \u001b[49m\u001b[43mtracing_options\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 312\u001b[0m \u001b[43m \u001b[49m\u001b[43mtracing_options\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpython_function\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 313\u001b[0m \u001b[43m \u001b[49m\u001b[43mplaceholder_bound_args\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 314\u001b[0m \u001b[43m \u001b[49m\u001b[43mplaceholder_bound_args\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 315\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 316\u001b[0m \u001b[43m \u001b[49m\u001b[43mfunc_graph\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfunc_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 317\u001b[0m \u001b[43m \u001b[49m\u001b[43madd_control_dependencies\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;129;43;01mnot\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdisable_acd\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 318\u001b[0m \u001b[43m \u001b[49m\u001b[43marg_names\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfunction_type_utils\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto_arg_names\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfunction_type\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 319\u001b[0m \u001b[43m \u001b[49m\u001b[43mcreate_placeholders\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 320\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 322\u001b[0m transform\u001b[38;5;241m.\u001b[39mapply_func_graph_transforms(traced_func_graph)\n\u001b[0;32m 324\u001b[0m graph_capture_container \u001b[38;5;241m=\u001b[39m traced_func_graph\u001b[38;5;241m.\u001b[39mfunction_captures\n",
198
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\framework\\func_graph.py:987\u001b[0m, in \u001b[0;36mfunc_graph_from_py_func\u001b[1;34m(name, python_func, args, kwargs, signature, func_graph, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, create_placeholders)\u001b[0m\n\u001b[0;32m 984\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 985\u001b[0m deps_control_manager \u001b[38;5;241m=\u001b[39m ops\u001b[38;5;241m.\u001b[39mNullContextmanager()\n\u001b[1;32m--> 987\u001b[0m \u001b[43m\u001b[49m\u001b[38;5;28;43;01mwith\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mfunc_graph\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mas_default\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdeps_control_manager\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mas\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdeps_ctx\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m 988\u001b[0m \u001b[43m \u001b[49m\u001b[43mcurrent_scope\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mvariable_scope\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_variable_scope\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 989\u001b[0m \u001b[43m \u001b[49m\u001b[43mdefault_use_resource\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mcurrent_scope\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43muse_resource\u001b[49m\n",
199
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\framework\\auto_control_deps.py:533\u001b[0m, in \u001b[0;36mAutomaticControlDependencies.__exit__\u001b[1;34m(self, unused_type, unused_value, unused_traceback)\u001b[0m\n\u001b[0;32m 526\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m r\u001b[38;5;241m.\u001b[39mgraph\u001b[38;5;241m.\u001b[39mbuilding_function:\n\u001b[0;32m 527\u001b[0m \u001b[38;5;66;03m# There may be many stateful ops in the graph. Adding them as\u001b[39;00m\n\u001b[0;32m 528\u001b[0m \u001b[38;5;66;03m# control inputs to each function output could create excessive\u001b[39;00m\n\u001b[0;32m 529\u001b[0m \u001b[38;5;66;03m# control edges in the graph. Thus we create an intermediate No-op to\u001b[39;00m\n\u001b[0;32m 530\u001b[0m \u001b[38;5;66;03m# chain the control dependencies between stateful ops and function\u001b[39;00m\n\u001b[0;32m 531\u001b[0m \u001b[38;5;66;03m# outputs.\u001b[39;00m\n\u001b[0;32m 532\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m idx \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m:\n\u001b[1;32m--> 533\u001b[0m control_output_op \u001b[38;5;241m=\u001b[39m \u001b[43mcontrol_flow_ops\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mno_op\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 534\u001b[0m control_output_op\u001b[38;5;241m.\u001b[39m_add_control_inputs(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mops_which_must_run)\n\u001b[0;32m 535\u001b[0m updated_ops_which_must_run \u001b[38;5;241m=\u001b[39m [control_output_op]\n",
200
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\ops\\gen_control_flow_ops.py:531\u001b[0m, in \u001b[0;36mno_op\u001b[1;34m(name)\u001b[0m\n\u001b[0;32m 529\u001b[0m \u001b[38;5;66;03m# Add nodes to the TensorFlow graph.\u001b[39;00m\n\u001b[0;32m 530\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 531\u001b[0m _, _, _op, _outputs \u001b[38;5;241m=\u001b[39m \u001b[43m_op_def_library\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_apply_op_helper\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 532\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mNoOp\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 533\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mTypeError\u001b[39;00m, \u001b[38;5;167;01mValueError\u001b[39;00m):\n\u001b[0;32m 534\u001b[0m _result \u001b[38;5;241m=\u001b[39m _dispatch\u001b[38;5;241m.\u001b[39mdispatch(\n\u001b[0;32m 535\u001b[0m no_op, (), \u001b[38;5;28mdict\u001b[39m(name\u001b[38;5;241m=\u001b[39mname)\n\u001b[0;32m 536\u001b[0m )\n",
201
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\framework\\op_def_library.py:796\u001b[0m, in \u001b[0;36m_apply_op_helper\u001b[1;34m(op_type_name, name, **keywords)\u001b[0m\n\u001b[0;32m 791\u001b[0m must_colocate_inputs \u001b[38;5;241m=\u001b[39m [val \u001b[38;5;28;01mfor\u001b[39;00m arg, val \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mzip\u001b[39m(op_def\u001b[38;5;241m.\u001b[39minput_arg, inputs)\n\u001b[0;32m 792\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m arg\u001b[38;5;241m.\u001b[39mis_ref]\n\u001b[0;32m 793\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m _MaybeColocateWith(must_colocate_inputs):\n\u001b[0;32m 794\u001b[0m \u001b[38;5;66;03m# Add Op to graph\u001b[39;00m\n\u001b[0;32m 795\u001b[0m \u001b[38;5;66;03m# pylint: disable=protected-access\u001b[39;00m\n\u001b[1;32m--> 796\u001b[0m op \u001b[38;5;241m=\u001b[39m \u001b[43mg\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_create_op_internal\u001b[49m\u001b[43m(\u001b[49m\u001b[43mop_type_name\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdtypes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m 797\u001b[0m \u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mscope\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minput_types\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minput_types\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 798\u001b[0m \u001b[43m \u001b[49m\u001b[43mattrs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattr_protos\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mop_def\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mop_def\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 800\u001b[0m \u001b[38;5;66;03m# `outputs` is returned as a separate return value so that the output\u001b[39;00m\n\u001b[0;32m 801\u001b[0m \u001b[38;5;66;03m# tensors can the `op` per se can be decoupled so that the\u001b[39;00m\n\u001b[0;32m 802\u001b[0m \u001b[38;5;66;03m# `op_callbacks` can function properly. See framework/op_callbacks.py\u001b[39;00m\n\u001b[0;32m 803\u001b[0m \u001b[38;5;66;03m# for more details.\u001b[39;00m\n\u001b[0;32m 804\u001b[0m outputs \u001b[38;5;241m=\u001b[39m op\u001b[38;5;241m.\u001b[39moutputs\n",
202
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\framework\\func_graph.py:670\u001b[0m, in \u001b[0;36mFuncGraph._create_op_internal\u001b[1;34m(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_device)\u001b[0m\n\u001b[0;32m 668\u001b[0m inp \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcapture(inp)\n\u001b[0;32m 669\u001b[0m captured_inputs\u001b[38;5;241m.\u001b[39mappend(inp)\n\u001b[1;32m--> 670\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_create_op_internal\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# pylint: disable=protected-access\u001b[39;49;00m\n\u001b[0;32m 671\u001b[0m \u001b[43m \u001b[49m\u001b[43mop_type\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcaptured_inputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdtypes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minput_types\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mattrs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mop_def\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 672\u001b[0m \u001b[43m \u001b[49m\u001b[43mcompute_device\u001b[49m\u001b[43m)\u001b[49m\n",
203
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\framework\\ops.py:2701\u001b[0m, in \u001b[0;36mGraph._create_op_internal\u001b[1;34m(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_device)\u001b[0m\n\u001b[0;32m 2698\u001b[0m \u001b[38;5;66;03m# _create_op_helper mutates the new Operation. `_mutation_lock` ensures a\u001b[39;00m\n\u001b[0;32m 2699\u001b[0m \u001b[38;5;66;03m# Session.run call cannot occur between creating and mutating the op.\u001b[39;00m\n\u001b[0;32m 2700\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_mutation_lock():\n\u001b[1;32m-> 2701\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43mOperation\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfrom_node_def\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 2702\u001b[0m \u001b[43m \u001b[49m\u001b[43mnode_def\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 2703\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 2704\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 2705\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_types\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdtypes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 2706\u001b[0m \u001b[43m \u001b[49m\u001b[43mcontrol_inputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcontrol_inputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 2707\u001b[0m \u001b[43m \u001b[49m\u001b[43minput_types\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minput_types\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 2708\u001b[0m \u001b[43m \u001b[49m\u001b[43moriginal_op\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_default_original_op\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 2709\u001b[0m \u001b[43m \u001b[49m\u001b[43mop_def\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mop_def\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 2710\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 2711\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_create_op_helper(ret, compute_device\u001b[38;5;241m=\u001b[39mcompute_device)\n\u001b[0;32m 2712\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ret\n",
204
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\framework\\ops.py:1196\u001b[0m, in \u001b[0;36mOperation.from_node_def\u001b[1;34m(***failed resolving arguments***)\u001b[0m\n\u001b[0;32m 1193\u001b[0m control_input_ops\u001b[38;5;241m.\u001b[39mappend(control_op)\n\u001b[0;32m 1195\u001b[0m \u001b[38;5;66;03m# Initialize c_op from node_def and other inputs\u001b[39;00m\n\u001b[1;32m-> 1196\u001b[0m c_op \u001b[38;5;241m=\u001b[39m \u001b[43m_create_c_op\u001b[49m\u001b[43m(\u001b[49m\u001b[43mg\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnode_def\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcontrol_input_ops\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mop_def\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mop_def\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1197\u001b[0m \u001b[38;5;28mself\u001b[39m \u001b[38;5;241m=\u001b[39m Operation(c_op, SymbolicTensor)\n\u001b[0;32m 1198\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_init(g)\n",
205
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\util\\traceback_utils.py:150\u001b[0m, in \u001b[0;36mfilter_traceback.<locals>.error_handler\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 148\u001b[0m filtered_tb \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 149\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 150\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 151\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m 152\u001b[0m filtered_tb \u001b[38;5;241m=\u001b[39m _process_traceback_frames(e\u001b[38;5;241m.\u001b[39m__traceback__)\n",
206
+ "File \u001b[1;32mc:\\Users\\Aryan\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\site-packages\\tensorflow\\python\\framework\\ops.py:1026\u001b[0m, in \u001b[0;36m_create_c_op\u001b[1;34m(graph, node_def, inputs, control_inputs, op_def, extract_traceback)\u001b[0m\n\u001b[0;32m 1024\u001b[0m \u001b[38;5;66;03m# pylint: disable=protected-access\u001b[39;00m\n\u001b[0;32m 1025\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m graph\u001b[38;5;241m.\u001b[39m_c_graph\u001b[38;5;241m.\u001b[39mget() \u001b[38;5;28;01mas\u001b[39;00m c_graph:\n\u001b[1;32m-> 1026\u001b[0m op_desc \u001b[38;5;241m=\u001b[39m \u001b[43mpywrap_tf_session\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mTF_NewOperation\u001b[49m\u001b[43m(\u001b[49m\u001b[43mc_graph\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1027\u001b[0m \u001b[43m \u001b[49m\u001b[43mcompat\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mas_str\u001b[49m\u001b[43m(\u001b[49m\u001b[43mnode_def\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mop\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 1028\u001b[0m \u001b[43m \u001b[49m\u001b[43mcompat\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mas_str\u001b[49m\u001b[43m(\u001b[49m\u001b[43mnode_def\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1029\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m node_def\u001b[38;5;241m.\u001b[39mdevice:\n\u001b[0;32m 1030\u001b[0m pywrap_tf_session\u001b[38;5;241m.\u001b[39mTF_SetDevice(op_desc, compat\u001b[38;5;241m.\u001b[39mas_str(node_def\u001b[38;5;241m.\u001b[39mdevice))\n",
207
+ "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
208
+ ]
209
+ }
210
+ ],
211
+ "source": [
212
+ "import cv2\n",
213
+ "import numpy as np\n",
214
+ "from tensorflow.keras.models import load_model\n",
215
+ "from tensorflow.keras.preprocessing import image\n",
216
+ "\n",
217
+ "# Load the trained model\n",
218
+ "model_best = load_model('./model/face_modelCNN.h5') # set your machine model file path here\n",
219
+ "\n",
220
+ "# Classes 7 emotional states\n",
221
+ "class_names = ['Angry', 'Disgusted', 'Fear', 'Happy', 'Sad', 'Surprise', 'Neutral']\n",
222
+ "\n",
223
+ "# Load the pre-trained face cascade\n",
224
+ "face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')\n",
225
+ "\n",
226
+ "# Open a connection to the webcam (0 is usually the default camera)\n",
227
+ "cap = cv2.VideoCapture(0)\n",
228
+ "\n",
229
+ "while True:\n",
230
+ " # Capture frame-by-frame\n",
231
+ " ret, frame = cap.read()\n",
232
+ "\n",
233
+ " # Convert the frame to grayscale for face detection\n",
234
+ " gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n",
235
+ "\n",
236
+ " # Detect faces in the frame\n",
237
+ " faces = face_cascade.detectMultiScale(gray, scaleFactor=1.3, minNeighbors=5, minSize=(30, 30))\n",
238
+ "\n",
239
+ " # Process each detected face\n",
240
+ " for (x, y, w, h) in faces:\n",
241
+ " # Extract the face region\n",
242
+ " face_roi = frame[y:y + h, x:x + w]\n",
243
+ "\n",
244
+ " # Resize the face image to the required input size for the model\n",
245
+ " face_image = cv2.resize(face_roi, (48, 48))\n",
246
+ " face_image = cv2.cvtColor(face_image, cv2.COLOR_BGR2GRAY)\n",
247
+ " face_image = image.img_to_array(face_image)\n",
248
+ " face_image = np.expand_dims(face_image, axis=0)\n",
249
+ " face_image = np.vstack([face_image])\n",
250
+ "\n",
251
+ " # Predict emotion using the loaded model\n",
252
+ " predictions = model_best.predict(face_image)\n",
253
+ " emotion_label = class_names[np.argmax(predictions)]\n",
254
+ "\n",
255
+ " # Display the emotion label on the frame\n",
256
+ " cv2.putText(frame, f'Emotion: {emotion_label}', (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX,\n",
257
+ " 0.9, (0, 0, 255), 2)\n",
258
+ "\n",
259
+ " # Draw a rectangle around the face\n",
260
+ " cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 255), 2)\n",
261
+ "\n",
262
+ " # Display the resulting frame\n",
263
+ " cv2.imshow('Emotion Detection', frame)\n",
264
+ "\n",
265
+ " # Break the loop if 'q' key is pressed\n",
266
+ " if cv2.waitKey(1) & 0xFF == ord('q'):\n",
267
+ " break\n",
268
+ "\n",
269
+ "# Release the webcam and close the window\n",
270
+ "cap.release()\n",
271
+ "cv2.destroyAllWindows()"
272
+ ]
273
+ }
274
+ ],
275
+ "metadata": {
276
+ "kernelspec": {
277
+ "display_name": "Python 3",
278
+ "language": "python",
279
+ "name": "python3"
280
+ },
281
+ "language_info": {
282
+ "codemirror_mode": {
283
+ "name": "ipython",
284
+ "version": 3
285
+ },
286
+ "file_extension": ".py",
287
+ "mimetype": "text/x-python",
288
+ "name": "python",
289
+ "nbconvert_exporter": "python",
290
+ "pygments_lexer": "ipython3",
291
+ "version": "3.12.0"
292
+ }
293
+ },
294
+ "nbformat": 4,
295
+ "nbformat_minor": 2
296
+ }
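
A caveat on facedec.ipynb before moving on: class_names is hard-coded and indexed with np.argmax(predictions), which silently assumes the label order used at training time. A minimal sketch to confirm that mapping, assuming the FER-style class-folder layout that model_attempt.ipynb below uses ('path_to_your_dataset/train' is that notebook's placeholder, not a real path):

from tensorflow.keras.preprocessing.image import ImageDataGenerator

datagen = ImageDataGenerator(rescale=1.0 / 255)
generator = datagen.flow_from_directory(
    'path_to_your_dataset/train',
    target_size=(48, 48),
    batch_size=32,
    class_mode='categorical',
)

# flow_from_directory assigns class indices in alphanumeric folder order,
# so this dict (e.g. {'angry': 0, 'disgust': 1, ...}) is exactly what
# np.argmax indexes into.
print(generator.class_indices)
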
model_attempt.ipynb ADDED
@@ -0,0 +1,125 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 2,
6
+ "metadata": {},
7
+ "outputs": [
8
+ {
9
+ "ename": "ModuleNotFoundError",
10
+ "evalue": "No module named 'cv2'",
11
+ "output_type": "error",
12
+ "traceback": [
13
+ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
14
+ "\u001b[1;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
15
+ "Cell \u001b[1;32mIn[2], line 4\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01msklearn\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mmodel_selection\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m train_test_split\n\u001b[0;32m 3\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01msklearn\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mmetrics\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m accuracy_score\n\u001b[1;32m----> 4\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mcv2\u001b[39;00m\n\u001b[0;32m 5\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mnumpy\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mnp\u001b[39;00m\n\u001b[0;32m 6\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtensorflow\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mmodels\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Sequential\n",
16
+ "\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'cv2'"
17
+ ]
18
+ }
19
+ ],
20
+ "source": [
21
+ "from sklearn import svm\n",
22
+ "from sklearn.model_selection import train_test_split\n",
23
+ "from sklearn.metrics import accuracy_score\n",
24
+ "import cv2\n",
25
+ "import numpy as np\n",
26
+ "from tensorflow.keras.models import Sequential\n",
27
+ "from tensorflow.keras.layers import Dense, Dropout, Flatten\n",
28
+ "from tensorflow.keras.layers import Conv2D, MaxPooling2D\n",
29
+ "from tensorflow.keras.utils import to_categorical\n",
30
+ "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
31
+ "\n",
32
+ "# Define the emotions\n",
33
+ "emotions = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral']\n",
34
+ "\n",
35
+ "# Load the dataset\n",
36
+ "train_dir = 'path_to_your_dataset/train'\n",
37
+ "test_dir = 'path_to_your_dataset/test'\n",
38
+ "\n",
39
+ "# Define the data generator\n",
40
+ "train_datagen = ImageDataGenerator(rescale=1./255)\n",
41
+ "test_datagen = ImageDataGenerator(rescale=1./255)\n",
42
+ "\n",
43
+ "train_generator = train_datagen.flow_from_directory(\n",
44
+ " train_dir,\n",
45
+ " target_size=(48, 48),\n",
46
+ " batch_size=32,\n",
47
+ " class_mode='categorical')\n",
48
+ "\n",
49
+ "test_generator = test_datagen.flow_from_directory(\n",
50
+ " test_dir,\n",
51
+ " target_size=(48, 48),\n",
52
+ " batch_size=32,\n",
53
+ " class_mode='categorical')\n",
54
+ "\n",
55
+ "# Define the CNN model\n",
56
+ "model = Sequential()\n",
57
+ "model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(48, 48, 3)))\n",
58
+ "model.add(Conv2D(32, (3, 3), activation='relu'))\n",
59
+ "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
60
+ "model.add(Dropout(0.25))\n",
61
+ "\n",
62
+ "model.add(Conv2D(64, (3, 3), activation='relu'))\n",
63
+ "model.add(Conv2D(64, (3, 3), activation='relu'))\n",
64
+ "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
65
+ "model.add(Dropout(0.25))\n",
66
+ "\n",
67
+ "model.add(Conv2D(128, (3, 3), activation='relu'))\n",
68
+ "model.add(Conv2D(128, (3, 3), activation='relu'))\n",
69
+ "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
70
+ "model.add(Dropout(0.25))\n",
71
+ "\n",
72
+ "model.add(Flatten())\n",
73
+ "model.add(Dense(128, activation='relu'))\n",
74
+ "model.add(Dropout(0.2))\n",
75
+ "model.add(Dense(7, activation='softmax'))\n",
76
+ "\n",
77
+ "# Compile the model\n",
78
+ "model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])\n",
79
+ "\n",
80
+ "# Train the model\n",
81
+ "model.fit(train_generator, epochs=10)\n",
82
+ "\n",
83
+ "# Define the SVM model\n",
84
+ "svm_model = svm.SVC(kernel='rbf', C=1)\n",
85
+ "\n",
86
+ "# Extract features from the CNN model\n",
87
+ "cnn_features = model.layers[-2].output\n",
88
+ "\n",
89
+ "# Train the SVM model\n",
90
+ "svm_model.fit(cnn_features, train_generator.classes)\n",
91
+ "\n",
92
+ "# Define a function to predict the emotion\n",
93
+ "def predict_emotion(face):\n",
94
+ " face = cv2.resize(face, (48, 48))\n",
95
+ " face = face.reshape(1, 48, 48, 3)\n",
96
+ " face = face / 255.0\n",
97
+ " features = model.predict(face)\n",
98
+ " emotion = svm_model.predict(features)\n",
99
+ " return emotions[emotion[0]]"
100
+ ]
101
+ }
102
+ ],
103
+ "metadata": {
104
+ "kernelspec": {
105
+ "display_name": ".venv",
106
+ "language": "python",
107
+ "name": "python3"
108
+ },
109
+ "language_info": {
110
+ "codemirror_mode": {
111
+ "name": "ipython",
112
+ "version": 3
113
+ },
114
+ "file_extension": ".py",
115
+ "mimetype": "text/x-python",
116
+ "name": "python",
117
+ "nbconvert_exporter": "python",
118
+ "pygments_lexer": "ipython3",
119
+ "version": "3.12.1"
120
+ }
121
+ },
122
+ "nbformat": 4,
123
+ "nbformat_minor": 2
124
+ }
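
The hybrid cell above trains the SVM but never measures it. A minimal evaluation sketch under the notebook's own assumptions (feature_extractor and svm_model as defined above, and the same class-folder layout under the notebook's test placeholder path):

from sklearn.metrics import accuracy_score
from tensorflow.keras.preprocessing.image import ImageDataGenerator

eval_datagen = ImageDataGenerator(rescale=1.0 / 255)
# shuffle=False so eval_generator.classes lines up with prediction order
eval_generator = eval_datagen.flow_from_directory(
    'path_to_your_dataset/test',
    target_size=(48, 48),
    batch_size=32,
    class_mode='categorical',
    shuffle=False,
)

test_features = feature_extractor.predict(eval_generator)
predicted = svm_model.predict(test_features)
print('Hybrid CNN+SVM accuracy:', accuracy_score(eval_generator.classes, predicted))
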
requirements.txt ADDED
@@ -0,0 +1,7 @@
1
+ Flask==3.0.0
2
+ Pillow==10.2.0
3
+ flask-cors==4.0.0
4
+ transformers==4.39.1
5
+ tensorflow
6
+ tf-keras
7
+ gunicorn
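
gunicorn is required because the Dockerfile launches the app with it, and tensorflow and tf-keras float unpinned, so two builds of the image can resolve different versions. A small sketch to snapshot what an environment actually installed (importlib.metadata ships with Python 3.8+, and package-name lookup is normalized, so the requirements.txt spellings work as-is):

from importlib.metadata import PackageNotFoundError, version

for pkg in ("Flask", "Pillow", "flask-cors", "transformers",
            "tensorflow", "tf-keras", "gunicorn"):
    try:
        print(f"{pkg}=={version(pkg)}")
    except PackageNotFoundError:
        print(f"{pkg}: not installed")
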
static/index.html ADDED
@@ -0,0 +1,664 @@
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8" />
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6
+ <title>Emojifier</title>
7
+ <script nonce="<%= nonce %>" src="/static/script.js"></script>
8
+ <link
9
+ rel="stylesheet"
10
+ href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css"
11
+ integrity="sha384-Gn5384xqQ1aoWXA+058RXPxPg6fy4IWvTNh0E263XmFcJlSAwiGgFAW/dAiS6JXm"
12
+ crossorigin="anonymous"
13
+ />
14
+ <link
15
+ href="https://fonts.googleapis.com/css2?family=Open+Sans:wght@300;400;600;700&display=swap"
16
+ rel="stylesheet"
17
+ />
18
+ <link
19
+ rel="stylesheet"
20
+ href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.3/css/all.min.css"
21
+ integrity="sha512-iBBXm8fW90+nuLcSKlbmrPcLa0OT92xO1BIsZ+ywDWZCvqsWgccV3gFoRBv0z+8dLJgyAHIhR35VZc2oM/gI1w=="
22
+ crossorigin="anonymous"
23
+ />
24
+ <style nonce="<%= nonce %>">
25
+ body {
26
+ background-color: rgb(255, 187, 0);
27
+ }
28
+
29
+ .navbar {
30
+ transition: transform 0.3s ease-in-out, padding 0.3s ease-in-out,
31
+ background-color 0.3s ease-in-out;
32
+ padding: 1rem 2rem;
33
+ background-color: #ffdc6b;
34
+ border: black 2px solid;
35
+ border-radius: 30px;
36
+ width: 100%;
37
+ margin: 0 auto;
38
+ transform: scale(1);
39
+ position: relative;
40
+ padding-left: 20px;
41
+ padding-right: 20px;
42
+ }
43
+
44
+ .navbar.scrolled {
45
+ padding: 0.5rem 2rem;
46
+ border-radius: 0;
47
+ width: 100%;
48
+ position: fixed;
49
+ top: 0;
50
+ left: 0;
51
+ background-color: #ffffff6e;
52
+ backdrop-filter: blur(3px);
53
+ -webkit-backdrop-filter: blur(3px);
54
+ border: none !important;
55
+ }
56
+
57
+ .navbar-brand {
58
+ transition: transform 1s ease-in-out;
59
+ position: absolute;
60
+ left: 50%;
61
+ transform: translateX(-50%);
62
+ }
63
+
64
+ .navbar.scrolled .navbar-brand {
65
+ transform: translateX(-50%) scale(0.8);
66
+ }
67
+
68
+ .content-area {
69
+ width: 100vw;
70
+ height: 90vh;
71
+ padding: 20px;
72
+ }
73
+
74
+ @keyframes emojiChange {
75
+ 0% {
76
+ content: "πŸ˜ƒ";
77
+ }
78
+ 25% {
79
+ content: "😎";
80
+ }
81
+ 50% {
82
+ content: "πŸ€ͺ";
83
+ }
84
+ 75% {
85
+ content: "πŸ₯³";
86
+ }
87
+ 100% {
88
+ content: "πŸ˜ƒ";
89
+ }
90
+ }
91
+
92
+ .emoji-text::after {
93
+ content: "πŸ˜ƒ";
94
+ animation: emojiChange 4s infinite;
95
+ }
96
+
97
+ .spacer {
98
+ margin-top: 13.6vh;
99
+ }
100
+
101
+ .content-wrapper {
102
+ margin-top: -13.6vh;
103
+ }
104
+
105
+ .main-content {
106
+ width: 95%;
107
+ margin: 0 auto;
108
+ min-height: calc(100vh - 13.6vh);
109
+ }
110
+
111
+ .bg-light-gray {
112
+ background-color: #f0f0f0;
113
+ min-height: 70vh;
114
+ border-top-left-radius: 30px !important;
115
+ border-bottom-left-radius: 30px;
116
+ }
117
+
118
+ .bg-dark-red {
119
+ background-color: #324376;
120
+ color: white;
121
+ min-height: 70vh;
122
+ border-top-right-radius: 30px;
123
+ border-bottom-right-radius: 30px;
124
+ }
125
+
126
+ .webcam-video {
127
+ object-fit: cover;
128
+ border-top-left-radius: 30px !important;
129
+ border-bottom-left-radius: 30px;
130
+ }
131
+
132
+ .instructions-container {
133
+ padding: 1.5rem;
134
+ background-color: rgba(255, 255, 255, 0.043);
135
+ border-radius: 20px;
136
+ margin: 1rem;
137
+ height: calc(100% - 2rem);
138
+ display: flex;
139
+ flex-direction: column;
140
+ }
141
+
142
+ .instructions-container h1 {
143
+ font-size: 1.6rem;
144
+ font-weight: 600;
145
+ margin-bottom: 1rem;
146
+ color: #ffc857;
147
+ }
148
+
149
+ .instructions-container h2,
150
+ .instructions-container h3 {
151
+ font-size: 1.1rem;
152
+ font-weight: 500;
153
+ color: #ffb627;
154
+ margin-top: 0.8rem;
155
+ }
156
+
157
+ .instructions-container ul {
158
+ margin-left: 0;
159
+ padding-left: 0;
160
+ }
161
+
162
+ .instructions-container li {
163
+ margin-bottom: 0.6rem;
164
+ font-size: 0.95rem;
165
+ line-height: 1.4;
166
+ color: #f0f0f0;
167
+ }
168
+
169
+ .instructions-container p {
170
+ color: #ffb627;
171
+ font-weight: 500;
172
+ }
173
+
174
+ .capture-btn {
175
+ margin-top: auto;
176
+ background-color: #ffc857;
177
+ color: #15224b;
178
+ border: none;
179
+ padding: 1rem;
180
+ border-radius: 10px;
181
+ font-weight: 600;
182
+ transition: all 0.3s ease;
183
+ opacity: 1;
184
+ }
185
+
186
+ .capture-btn:hover {
187
+ background-color: #ffb627;
188
+ transform: scale(1.02);
189
+ }
190
+
191
+ .calculating-container {
192
+ display: none;
193
+ text-align: center;
194
+ height: 100%;
195
+ justify-content: center;
196
+ align-items: center;
197
+ flex-direction: column;
198
+ }
199
+
200
+ .calculating-text {
201
+ font-size: 2rem;
202
+ color: #ffc857;
203
+ margin-bottom: 2rem;
204
+ }
205
+
206
+ .result-emoji {
207
+ font-size: 8rem;
208
+ margin-bottom: 1rem;
209
+ animation: pulse 2s infinite;
210
+ }
211
+
212
+ .result-comment {
213
+ font-size: 1.2rem;
214
+ color: #f0f0f0;
215
+ }
216
+
217
+ @keyframes bounce {
218
+ 0%,
219
+ 20%,
220
+ 50%,
221
+ 80%,
222
+ 100% {
223
+ transform: translateY(0);
224
+ }
225
+ 40% {
226
+ transform: translateY(-20px);
227
+ }
228
+ 60% {
229
+ transform: translateY(-10px);
230
+ }
231
+ }
232
+
233
+ @keyframes pulse {
234
+ 0% {
235
+ transform: scale(1);
236
+ }
237
+ 50% {
238
+ transform: scale(1.1);
239
+ }
240
+ 100% {
241
+ transform: scale(1);
242
+ }
243
+ }
244
+
245
+ @keyframes spin {
246
+ 0% {
247
+ transform: rotate(0deg);
248
+ }
249
+ 100% {
250
+ transform: rotate(360deg);
251
+ }
252
+ }
253
+
254
+ .loading-spinner {
255
+ width: 50px;
256
+ height: 50px;
257
+ border: 5px solid #f3f3f3;
258
+ border-top: 5px solid #ffc857;
259
+ border-radius: 50%;
260
+ animation: spin 1s linear infinite;
261
+ margin: 20px auto;
262
+ }
263
+
264
+ .bounce {
265
+ animation: bounce 2s infinite;
266
+ }
267
+
268
+ #capturedImage {
269
+ display: none;
270
+ width: 100%;
271
+ height: 100%;
272
+ object-fit: cover;
273
+ border-top-left-radius: 30px !important;
274
+ border-bottom-left-radius: 30px;
275
+ }
276
+
277
+ .technical-section {
278
+ background-color: #324376;
279
+ color: white;
280
+ padding: 4rem 2rem;
281
+ }
282
+
283
+ .technical-section h2 {
284
+ color: #ffc857;
285
+ margin-bottom: 2rem;
286
+ }
287
+
288
+ .step-box {
289
+ background: rgba(255, 255, 255, 0.1);
290
+ border-radius: 10px;
291
+ padding: 1.5rem;
292
+ margin-bottom: 1.5rem;
293
+ }
294
+
295
+ .step-box h3 {
296
+ color: #ffb627;
297
+ margin-bottom: 1rem;
298
+ }
299
+
300
+ .step-box p {
301
+ color: #f0f0f0;
302
+ line-height: 1.6;
303
+ }
304
+
305
+ .code-block {
306
+ background: #1e2a4a;
307
+ padding: 1rem;
308
+ border-radius: 5px;
309
+ margin: 1rem 0;
310
+ font-family: monospace;
311
+ }
312
+
313
+ .probability-bar {
314
+ height: 20px;
315
+ background: #ffc857;
316
+ border-radius: 10px;
317
+ margin: 5px 0;
318
+ }
319
+
320
+ .probability-label {
321
+ display: flex;
322
+ justify-content: space-between;
323
+ color: #f0f0f0;
324
+ margin-bottom: 5px;
325
+ }
326
+
327
+ .detected-emoji {
328
+ font-size: 4rem;
329
+ }
330
+
331
+ .highlight-text {
332
+ color: #ffc857;
333
+ }
334
+
335
+ .confidence-text {
336
+ color: #ffc857;
337
+ }
338
+
339
+ .preprocessed-image {
340
+ max-width: 100%;
341
+ border-radius: 5px;
342
+ }
343
+
344
+ .probability-bar-custom {
345
+ width: var(--percentage);
346
+ }
347
+
348
+ .processing-info {
349
+ margin-top: 10px;
350
+ font-size: 0.9em;
351
+ color: #666;
352
+ }
353
+
354
+ .step-list {
355
+ list-style: none;
356
+ padding: 0;
357
+ }
358
+
359
+ .step-list li {
360
+ padding: 5px 0;
361
+ color: #f0f0f0;
362
+ position: relative;
363
+ padding-left: 20px;
364
+ }
365
+
366
+ .step-list li:before {
367
+ content: "β†’";
368
+ position: absolute;
369
+ left: 0;
370
+ color: #ffc857;
371
+ }
372
+
373
+ .final-analysis {
374
+ background: #324376 !important;
375
+ border-radius: 30px !important;
376
+ padding: 40px !important;
377
+ margin-top: 40px;
378
+ box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
379
+ }
380
+
381
+ .analysis-container {
382
+ padding: 0;
383
+ }
384
+
385
+ .analysis-box {
386
+ background: rgba(255, 255, 255, 0.05) !important;
387
+ padding: 30px;
388
+ border-radius: 30px;
389
+ height: 100%;
390
+ border: none !important;
391
+ margin: 10px;
392
+ text-align: center;
393
+ }
394
+
395
+ .analysis-box h4 {
396
+ color: #f0f0f0;
397
+ font-size: 1.5rem;
398
+ margin-bottom: 20px;
399
+ font-weight: 300;
400
+ letter-spacing: 1px;
401
+ }
402
+
403
+ .detected-emoji {
404
+ font-size: 4.5rem;
405
+ margin: 15px 0;
406
+ }
407
+
408
+ .highlight-text {
409
+ color: #ffc857;
410
+ font-size: 2rem;
411
+ margin-top: 15px !important;
412
+ text-transform: capitalize;
413
+ font-weight: 300;
414
+ }
415
+
416
+ .confidence-text {
417
+ color: #ffc857;
418
+ font-size: 2rem;
419
+ font-weight: 300;
420
+ }
421
+
422
+ .final-analysis h3 {
423
+ color: #f0f0f0;
424
+ font-size: 2rem;
425
+ margin-bottom: 30px;
426
+ font-weight: 300;
427
+ letter-spacing: 1px;
428
+ }
429
+ </style>
430
+ </head>
431
+ <body>
432
+ <div class="container-fluid mt-4 fixed-top px-3">
433
+ <nav class="navbar navbar-expand navbar-light">
434
+ <div class="container-fluid">
435
+ <button
436
+ id="settingsBtn"
437
+ class="btn btn-link text-dark d-none d-lg-block"
438
+ >
439
+ <i class="fas fa-cog fa-lg"></i>
440
+ </button>
441
+
442
+ <a class="navbar-brand font-weight-bold" href="#">
443
+ EM<span class="emoji-text"></span>JIFIER
444
+ </a>
445
+
446
+ <button class="btn btn-link text-dark d-none d-lg-block">
447
+ <i class="fas fa-user fa-lg"></i>
448
+ </button>
449
+ </div>
450
+ </nav>
451
+ </div>
452
+
453
+ <div class="spacer"></div>
454
+
455
+ <div class="content-area">
456
+ <div class="row no-gutters camera-emoji-container">
457
+ <div class="col-md-6 w-100 vh-100 bg-light-gray">
458
+ <video
459
+ id="webcam"
460
+ autoplay
461
+ playsinline
462
+ class="w-100 h-100 webcam-video"
463
+ ></video>
464
+ <img id="capturedImage" alt="Captured Image" />
465
+ </div>
466
+ <div class="col-md-6 w-100 vh-100 bg-dark-red">
467
+ <div class="instructions-container">
468
+ <div id="initial-content">
469
+ <h1>Welcome to our Emotion Detection Website!</h1>
470
+
471
+ <h2>Getting Started:</h2>
472
+ <ul class="list-unstyled">
473
+ <li>
474
+ 1. Allow webcam access and ensure your face is well-lit and
475
+ centered
476
+ </li>
477
+ <li>
478
+ 2. Click "Detect Emotion" to capture your image and display your
479
+ emotion (e.g., Happy, Sad)
480
+ </li>
481
+ <li>
482
+ 3. Click "Show Details" to see how your image is preprocessed
483
+ and how the model scores each emotion
484
+ </li>
485
+ </ul>
486
+
487
+ <h3>Best Practices:</h3>
488
+ <ul class="list-unstyled">
489
+ <li>β€’ Ensure clear lighting</li>
490
+ <li>β€’ Keep face unobstructed</li>
491
+ <li>β€’ Check webcam permissions if issues arise</li>
492
+ <li>β€’ Adjust lighting if needed</li>
493
+ </ul>
494
+ </div>
495
+
496
+ <div id="calculating-content" class="calculating-container">
497
+ <div class="loading-spinner"></div>
498
+ <div class="result-emoji"></div>
499
+ <div class="result-comment"></div>
500
+ </div>
501
+
502
+ <button id="detectBtn" class="capture-btn">
503
+ <i class="fas fa-smile mr-2"></i>Detect Emotion
504
+ </button>
505
+ </div>
506
+ </div>
507
+ </div>
508
+ </div>
509
+
510
+ <div id="technical-section" class="technical-section">
511
+ <div class="container">
512
+ <h2 class="text-center mb-4">Real-Time Model Analysis</h2>
513
+
514
+ <div class="step-box">
515
+ <h3>Step 1: Image Acquisition</h3>
516
+ <p>Capturing and preparing your image:</p>
517
+ <div id="preprocessed-image"></div>
518
+ </div>
519
+
520
+ <div class="step-box">
521
+ <h3>Step 2: Model Prediction</h3>
522
+ <p>Confidence scores for each emotion:</p>
523
+ <div id="emotion-probabilities">
524
+ <!-- Probabilities will be dynamically inserted here -->
525
+ </div>
526
+ </div>
527
+
528
+ <div class="step-box final-analysis">
529
+ <h3>Final Analysis</h3>
530
+ <div class="analysis-container">
531
+ <div class="row align-items-stretch">
532
+ <div class="col-md-6">
533
+ <div class="analysis-box">
534
+ <h4>Primary Emotion</h4>
535
+ <div id="primary-emotion" class="text-center">
536
+ <span id="detected-emoji" class="detected-emoji"></span>
537
+ <h4 id="detected-emotion" class="highlight-text"></h4>
538
+ </div>
539
+ </div>
540
+ </div>
541
+ <div class="col-md-6">
542
+ <div class="analysis-box">
543
+ <h4>Confidence Level</h4>
544
+ <div id="confidence-score" class="text-center">
545
+ <h4 class="confidence-text"></h4>
546
+ </div>
547
+ </div>
548
+ </div>
549
+ </div>
550
+ </div>
551
+ </div>
552
+ </div>
553
+ </div>
554
+
555
+ <script nonce="<%= nonce %>">
556
+ const navbar = document.querySelector(".navbar");
557
+
558
+ window.addEventListener("scroll", () => {
559
+ if (window.scrollY > 10) {
560
+ navbar.classList.add("scrolled");
561
+ } else {
562
+ navbar.classList.remove("scrolled");
563
+ }
564
+ });
565
+
566
+ // Function to update technical section with model results
567
+ function updateTechnicalSection(result) {
568
+ // Update preprocessed image
569
+ document.getElementById("preprocessed-image").innerHTML = `
570
+ <img src="${result.grayscale_image}" alt="Preprocessed Image" class="preprocessed-image">
571
+ `;
572
+
573
+ // Update emotion probabilities
574
+ const probContainer = document.getElementById("emotion-probabilities");
575
+ probContainer.innerHTML = "";
576
+
577
+ Object.entries(result.model_probabilities).forEach(
578
+ ([emotion, probability]) => {
579
+ const percentage = (probability * 100).toFixed(1);
580
+ probContainer.innerHTML += `
581
+ <div class="probability-label">
582
+ <span>${emotion}</span>
583
+ <span>${percentage}%</span>
584
+ </div>
585
+ <div class="probability-bar probability-bar-custom" style="--percentage: ${percentage}%"></div>
586
+ `;
587
+ }
588
+ );
589
+
590
+ // Update primary emotion
591
+ document.getElementById("detected-emoji").textContent = result.emoji;
592
+ document.getElementById("detected-emotion").textContent = result.emotion;
593
+
594
+ // Update confidence score
595
+ const confidence = (
596
+ result.model_probabilities[result.emotion] * 100
597
+ ).toFixed(1);
598
+ document
599
+ .getElementById("confidence-score")
600
+ .querySelector("h4").textContent = `${confidence}% Confident`;
601
+ }
602
+ </script>
603
+ <script
604
+ nonce="<%= nonce %>"
605
+ src="https://code.jquery.com/jquery-3.2.1.slim.min.js"
606
+ integrity="sha384-KJ3o2DKtIkvYIK3UENzmM7KCkRr/rE9/Qpg6aAZGJwFDMVNA/GpGFF93hXpG5KkN"
607
+ crossorigin="anonymous"
608
+ ></script>
609
+ <script
610
+ nonce="<%= nonce %>"
611
+ src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.12.9/umd/popper.min.js"
612
+ integrity="sha384-ApNbgh9B+Y1QKtv3Rn7W3mgPxhU9K/ScQsAP7hUibX39j7fakFPskvXusvfa0b4Q"
613
+ crossorigin="anonymous"
614
+ ></script>
615
+ <script
616
+ nonce="<%= nonce %>"
617
+ src="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/js/bootstrap.min.js"
618
+ integrity="sha384-JZR6Spejh4U02d8jOt6vLEHfe/JQGiRRSQQxSfFWpi1MquVdAyjUar5+76PVCmYl"
619
+ crossorigin="anonymous"
620
+ ></script>
621
+ <script nonce="<%= nonce %>">
622
+ async function initWebcam() {
623
+ try {
624
+ const stream = await navigator.mediaDevices.getUserMedia({
625
+ video: true,
626
+ });
627
+ const video = document.getElementById("webcam");
628
+ video.srcObject = stream;
629
+ } catch (err) {
630
+ console.error("Error accessing webcam:", err);
631
+ }
632
+ }
633
+
634
+ function captureImage() {
635
+ const video = document.getElementById("webcam");
636
+ const canvas = document.createElement("canvas");
637
+ canvas.width = video.videoWidth;
638
+ canvas.height = video.videoHeight;
639
+ canvas
640
+ .getContext("2d")
641
+ .drawImage(video, 0, 0, canvas.width, canvas.height);
642
+
643
+ // Hide video and show captured image
644
+ video.style.display = "none";
645
+ const capturedImage = document.getElementById("capturedImage");
646
+ capturedImage.src = canvas.toDataURL("image/png");
647
+ capturedImage.style.display = "block";
648
+
649
+ // Start emotion detection animation
650
+ const initialContent = document.getElementById("initial-content");
651
+ const calculatingContent = document.getElementById("calculating-content");
652
+ const detectBtn = document.getElementById("detectBtn");
653
+
654
+ initialContent.style.display = "none";
655
+ calculatingContent.style.display = "flex";
656
+ detectBtn.style.opacity = "0";
657
+
658
+ return capturedImage;
659
+ }
660
+
661
+ initWebcam();
662
+ </script>
663
+ </body>
+ </html>
static/script.js ADDED
@@ -0,0 +1,215 @@
11
+ // Send the image to the Flask backend
12
+ async function sendImageToBackend(imageData) {
13
+ try {
14
+ // Get base64 string and remove the prefix
15
+ const base64String = imageData.src.split(',')[1];
16
+
17
+ const response = await fetch("/upload", { // Changed to relative path
18
+ method: "POST",
19
+ headers: {
20
+ "Content-Type": "application/json",
21
+ },
22
+ body: JSON.stringify({
23
+ image: base64String
24
+ })
25
+ });
26
+
27
+ if (!response.ok) {
28
+ throw new Error(`Server responded with status: ${response.status}`);
29
+ }
30
+ const result = await response.json();
31
+ console.log("Emotion detection result:", result);
32
+ return result;
33
+ } catch (error) {
34
+ console.error("Error during API call:", error);
35
+ throw error;
36
+ }
37
+ }
38
+ // Handle the Detect Emotion button click
39
+ async function onDetectEmotionClick() {
40
+ const initialContent = document.getElementById("initial-content");
41
+ const calculatingContent = document.getElementById("calculating-content");
42
+ const detectBtn = document.getElementById("detectBtn");
43
+ const technicalSection = document.getElementById("technical-section");
44
+ const loadingSpinner = document.querySelector(".loading-spinner");
45
+
46
+ try {
47
+ // Hide initial content and show loading
48
+ initialContent.style.display = "none";
49
+ calculatingContent.style.display = "flex";
50
+ detectBtn.style.opacity = "0";
51
+ loadingSpinner.style.display = "block";
52
+
53
+ // Hide technical section while processing
54
+ technicalSection.style.display = "none";
55
+
56
+ // Capture and process image
57
+ const capturedImage = captureImage();
58
+ const result = await sendImageToBackend(capturedImage);
59
+
60
+ if (result.error) {
61
+ throw new Error(result.error);
62
+ }
63
+
64
+ // Hide loading spinner after getting results
65
+ loadingSpinner.style.display = "none";
66
+
67
+ // Show and update technical section
68
+ technicalSection.style.display = "block";
69
+ void technicalSection.offsetWidth;
70
+ technicalSection.classList.add('visible');
71
+
72
+ // Update technical section with processing steps
73
+ updateTechnicalSection(result);
74
+
75
+ // Update emotion display with personalized message
76
+ document.querySelector(".result-emoji").textContent = result.emoji;
77
+ document.querySelector(".result-comment").textContent = getEmotionMessage(result.emotion);
78
+
79
+ // Update button
80
+ detectBtn.innerHTML = '<i class="fas fa-code mr-2"></i>Show Details';
81
+ detectBtn.style.opacity = "1";
82
+
83
+ // Remove old click handler and add new one
84
+ detectBtn.removeEventListener('click', onDetectEmotionClick);
85
+ detectBtn.addEventListener('click', handleShowDetails);
86
+
87
+ } catch (error) {
88
+ console.error("Error:", error);
89
+ loadingSpinner.style.display = "none";
90
+ document.querySelector(".result-emoji").textContent = "❌";
91
+ document.querySelector(".result-comment").textContent = "Failed to detect emotion. Please try again.";
92
+ detectBtn.style.opacity = "1";
93
+ technicalSection.style.display = "none";
94
+ }
95
+ }
96
+
97
+ // Initialize when document is loaded
98
+ document.addEventListener("DOMContentLoaded", () => {
99
+ const detectBtn = document.getElementById("detectBtn");
100
+ const settingsBtn = document.getElementById("settingsBtn");
101
+ const technicalSection = document.getElementById("technical-section");
102
+
103
+ // Hide technical section initially
104
+ technicalSection.style.display = "none";
105
+
106
+ // Add click handlers
107
+ detectBtn.addEventListener("click", onDetectEmotionClick);
108
+ settingsBtn.addEventListener("click", onDetectEmotionClick);
109
+
110
+ initWebcam();
111
+ });
112
+
113
+ function updateTechnicalSection(result) {
114
+ console.log("Full result:", result);
115
+ console.log("Processing steps:", result.processing_steps);
116
+ console.log("Detailed steps:", result.processing_steps?.detailed_steps);
117
+
118
+ // Update preprocessed image with processing info
119
+ document.getElementById("preprocessed-image").innerHTML = `
120
+ <img src="${result.grayscale_image}" alt="Preprocessed Image" class="preprocessed-image">
121
+ <div class="processing-info">
122
+ <p>Image Size: ${result.processing_steps?.original_size?.join("x") ?? "n/a"}</p>
123
+ <p>Color Mode: ${result.processing_steps?.color_mode ?? "n/a"}</p>
124
+ </div>
125
+ `;
126
+
127
+ // Update processing pipeline steps
128
+ const processSteps = result.processing_steps?.detailed_steps;
129
+ if (!processSteps) {
130
+ // Log and fall through: the probability bars and final analysis below
+ // do not depend on detailed_steps, and the try/catch guards the loop.
131
+ console.warn("No detailed processing steps found in result");
132
+ }
133
+
134
+ let stepsHtml = '<div class="process-flow">';
135
+
136
+ // Add each processing stage with error handling
137
+ try {
138
+ for (const [stage, steps] of Object.entries(processSteps)) {
139
+ console.log("Processing stage:", stage, steps);
140
+ const stageName = stage.split('_').map(word =>
141
+ word.charAt(0).toUpperCase() + word.slice(1)
142
+ ).join(' ');
143
+
144
+ stepsHtml += `
145
+ <div class="process-stage">
146
+ <h4>${stageName}</h4>
147
+ <ul class="step-list">
148
+ ${Array.isArray(steps) ? steps.map(step => `<li>${step}</li>`).join('') : ''}
149
+ </ul>
150
+ </div>
151
+ `;
152
+ }
153
+ stepsHtml += '</div>';
154
+
155
+ // Add model information
156
+ stepsHtml += `
157
+ <div class="model-info">
158
+ <h4>MODEL SPECIFICATIONS</h4>
159
+ <ul class="step-list">
160
+ <li>Type: ${result.processing_steps.model_type}</li>
161
+ <li>Input Shape: ${result.processing_steps.input_shape}</li>
162
+ <li>Output: ${result.processing_steps.output_classes}</li>
163
+ </ul>
164
+ </div>
165
+ `;
166
+
167
+ console.log("Generated HTML:", stepsHtml);
168
+ document.getElementById("processing-steps").innerHTML = stepsHtml;
169
+ } catch (error) {
170
+ console.error("Error generating steps HTML:", error);
171
+ }
172
+
173
+ // Update emotion probabilities
174
+ const probContainer = document.getElementById("emotion-probabilities");
175
+ probContainer.innerHTML = "";
176
+
177
+ Object.entries(result.model_probabilities).forEach(([emotion, probability]) => {
178
+ const percentage = (probability * 100).toFixed(1);
179
+ const barElement = document.createElement('div');
180
+ barElement.className = 'probability-bar probability-bar-custom probability-bar-width';
181
+ barElement.style.setProperty('--percentage', `${percentage}%`);
182
+
183
+ probContainer.innerHTML += `
184
+ <div class="probability-label">
185
+ <span>${emotion}</span>
186
+ <span>${percentage}%</span>
187
+ </div>
188
+ `;
189
+ probContainer.appendChild(barElement);
190
+ });
191
+
192
+ // Update remaining elements
193
+ document.getElementById("detected-emoji").textContent = result.emoji;
194
+ document.getElementById("detected-emotion").textContent = result.emotion;
195
+ document.getElementById("confidence-score").querySelector("h4").textContent =
196
+ `${(result.model_probabilities[result.emotion] * 100).toFixed(1)}%`;
197
+ }
198
+
199
+ function getEmotionMessage(emotion) {
200
+ const messages = {
201
+ happy: "You're radiating happiness! Your smile lights up the room! 🌟",
202
+ sad: "I see some sadness there. Remember, every cloud has a silver lining! 🌈",
203
+ angry: "Whoa, looking pretty fired up! Take a deep breath and count to ten. 🧘",
204
+ disgust: "That's quite the expression! Something leave a bad taste? πŸ˜–",
205
+ fear: "I sense some anxiety there. Remember, you're stronger than you think! πŸ’ͺ",
206
+ surprise: "Well, that caught you off guard! What an unexpected moment! 😲",
207
+ neutral: "Keeping it cool and collected with that poker face! 😎"
208
+ };
209
+ return messages[emotion] || "Interesting expression you've got there! πŸ€”";
210
+ }
211
+
212
+ // Add this new function for handling the "Show Details" click
213
+ function handleShowDetails() {
214
+ const technicalSection = document.getElementById("technical-section");
215
+ // First ensure the section is visible
216
+ technicalSection.style.display = "block";
217
+ // Then scroll to it
218
+ setTimeout(() => {
219
+ technicalSection.scrollIntoView({
220
+ behavior: "smooth",
221
+ block: "start"
222
+ });
223
+ }, 100);
224
+ }
225
+
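
The fetch("/upload") contract above is easy to exercise without a browser. A hedged sketch in Python (assumptions: the server is running locally on port 7860, a test_face.png exists next to the script, and requests is installed as a local test tool only; it is not in requirements.txt):

import base64

import requests  # local test dependency, not part of the app

with open("test_face.png", "rb") as f:
    image_b64 = base64.b64encode(f.read()).decode()

resp = requests.post(
    "http://localhost:7860/upload",
    json={"image": image_b64},  # same JSON shape script.js sends
    timeout=60,
)
resp.raise_for_status()
result = resp.json()

# Fields that onDetectEmotionClick / updateTechnicalSection read:
print(result["emotion"], result["emoji"])
for emotion, prob in result["model_probabilities"].items():
    print(f"{emotion}: {prob:.3f}")
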
static/style.css ADDED
@@ -0,0 +1,349 @@
1
+ /* Move all styles from the <style> tag in index.html here */
2
+ .detected-emoji {
3
+ font-size: 4rem;
4
+ }
5
+
6
+ .highlight-text {
7
+ color: #ffc857;
8
+ }
9
+
10
+ .confidence-text {
11
+ color: #ffc857;
12
+ font-size: 5rem !important;
13
+ font-weight: 800 !important;
14
+ text-align: center;
15
+ margin: 0;
16
+ display: flex;
17
+ justify-content: center;
18
+ align-items: center;
19
+ height: 100%;
20
+ letter-spacing: -1px;
21
+ }
22
+
23
+ .preprocessed-image {
24
+ max-width: 100%;
25
+ border-radius: 5px;
26
+ }
27
+
28
+ .probability-bar-custom {
29
+ width: var(--percentage);
30
+ }
31
+
32
+ .processing-info p {
33
+ margin: 5px 0;
34
+ color: #f0f0f0;
35
+ }
36
+
37
+ .probability-bar-width {
38
+ width: var(--percentage);
39
+ }
40
+
41
+ .technical-display {
42
+ display: block;
43
+ }
44
+
45
+ .smooth-scroll {
46
+ scroll-behavior: smooth;
47
+ }
48
+
49
+ .process-flow {
50
+ display: flex;
51
+ flex-direction: column;
52
+ gap: 20px;
53
+ margin: 20px 0;
54
+ }
55
+
56
+ .process-stage {
57
+ background: rgba(255, 255, 255, 0.08);
58
+ padding: 20px;
59
+ border-radius: 8px;
60
+ margin-bottom: 15px;
61
+ border-left: 4px solid #ffc857;
62
+ }
63
+
64
+ .process-stage h4 {
65
+ color: #ffc857;
66
+ margin-bottom: 15px;
67
+ font-size: 1.1em;
68
+ text-transform: uppercase;
69
+ letter-spacing: 1px;
70
+ }
71
+
72
+ .model-info {
73
+ margin-top: 20px;
74
+ padding: 15px;
75
+ background: rgba(255, 255, 255, 0.05);
76
+ border-radius: 8px;
77
+ border-left: 4px solid #4CAF50;
78
+ }
79
+
80
+ .model-info h4 {
81
+ color: #4CAF50;
82
+ margin-bottom: 10px;
83
+ }
84
+
85
+ .analysis-box {
86
+ background: rgba(255, 255, 255, 0.05);
87
+ padding: 20px;
88
+ border-radius: 8px;
89
+ margin-bottom: 15px;
90
+ border-left: 4px solid #ffc857;
91
+ display: flex;
92
+ flex-direction: column;
93
+ justify-content: center;
94
+ align-items: center;
95
+ min-height: 250px;
96
+ }
97
+
98
+ .analysis-box h4 {
99
+ color: #ffc857;
100
+ margin-bottom: 15px;
101
+ text-align: center;
102
+ }
103
+
104
+ .step-box .row {
105
+ margin: 0;
106
+ }
107
+
108
+ .step-box h3 {
109
+ position: relative;
110
+ padding-left: 35px;
111
+ }
112
+
113
+ .step-box h3:before {
114
+ content: "";
115
+ position: absolute;
116
+ left: 0;
117
+ top: 50%;
118
+ transform: translateY(-50%);
119
+ width: 25px;
120
+ height: 25px;
121
+ background: #ffc857;
122
+ border-radius: 50%;
123
+ display: flex;
124
+ align-items: center;
125
+ justify-content: center;
126
+ font-size: 14px;
127
+ color: #324376;
128
+ }
129
+
130
+ .step-box:nth-child(1) h3:before { content: "1"; }
131
+ .step-box:nth-child(2) h3:before { content: "2"; }
132
+ .step-box:nth-child(3) h3:before { content: "3"; }
133
+ .step-box:nth-child(4) h3:before { content: "4"; }
134
+
135
+ .step-list li {
136
+ color: #ffffff;
137
+ margin-bottom: 8px;
138
+ line-height: 1.4;
139
+ }
140
+
141
+ .step-list li:before {
142
+ color: #ffc857;
143
+ margin-right: 10px;
144
+ }
145
+
146
+ .final-analysis {
147
+ background: rgba(50, 67, 118, 0.3) !important;
148
+ border-radius: 30px !important;
149
+ padding: 40px !important;
150
+ margin-top: 40px;
151
+ }
152
+
153
+ .final-analysis h3 {
154
+ color: #f0f0f0;
155
+ font-size: 2.5rem;
156
+ margin-bottom: 30px;
157
+ font-weight: 300;
158
+ letter-spacing: 1px;
159
+ padding-left: 0;
160
+ }
161
+
162
+ .final-analysis .analysis-box {
163
+ background: rgba(50, 67, 118, 0.8) !important;
164
+ border-radius: 20px;
165
+ padding: 30px;
166
+ height: 100%;
167
+ min-height: 280px;
168
+ border: none !important;
169
+ display: flex;
170
+ flex-direction: column;
171
+ justify-content: center;
172
+ align-items: center;
173
+ margin: 10px;
174
+ }
175
+
176
+ .final-analysis .analysis-box h4 {
177
+ color: #f0f0f0;
178
+ font-size: 1.8rem;
179
+ margin-bottom: 25px;
180
+ font-weight: 300;
181
+ letter-spacing: 1px;
182
+ text-align: center;
183
+ }
184
+
185
+ .confidence-circle {
186
+ width: 150px;
187
+ height: 150px;
188
+ border-radius: 50%;
189
+ background: rgba(255, 255, 255, 0.05);
190
+ margin: 0 auto;
191
+ display: flex;
192
+ align-items: center;
193
+ justify-content: center;
194
+ border: 4px solid #4CAF50;
195
+ }
196
+
197
+ .confidence-text {
198
+ font-size: 1.5rem;
199
+ color: #4CAF50;
200
+ font-weight: bold;
201
+ }
202
+
203
+ .detected-emoji {
204
+ font-size: 5rem;
205
+ display: block;
206
+ margin-bottom: 10px;
207
+ }
208
+
209
+ .highlight-text {
210
+ font-size: 1.8rem;
211
+ text-transform: capitalize;
212
+ }
213
+
214
+ .step-box h3 {
215
+ color: #ffc857;
216
+ font-size: 1.8rem;
217
+ margin-bottom: 1.5rem;
218
+ }
219
+
220
+ .technical-section {
221
+ display: none; /* Hide initially */
222
+ background-color: #324376;
223
+ color: white;
224
+ padding: 4rem 2rem;
225
+ opacity: 0;
226
+ transition: opacity 0.5s ease-in-out;
227
+ }
228
+
229
+ .technical-section.visible {
230
+ display: block;
231
+ opacity: 1;
232
+ }
233
+
234
+ .confidence-score {
235
+ display: flex;
236
+ justify-content: center;
237
+ align-items: center;
238
+ height: 100%;
239
+ color: #ffc857 !important;
240
+ }
241
+
242
+ #confidence-score {
243
+ height: 100%;
244
+ display: flex;
245
+ justify-content: center;
246
+ align-items: center;
247
+ color: #ffc857 !important;
248
+
249
+ }
250
+
251
+ /* Primary Emotion Box */
252
+ #primary-emotion {
253
+ text-align: center;
254
+ display: flex;
255
+ flex-direction: column;
256
+ justify-content: center;
257
+ align-items: center;
258
+ height: 100%;
259
+ }
260
+
261
+ .detected-emoji {
262
+ font-size: 5.5rem;
263
+ margin: 15px 0;
264
+ }
265
+
266
+ .highlight-text {
267
+ color: #ffc857;
268
+ font-size: 3.5rem !important;
269
+ margin-top: 15px !important;
270
+ text-transform: capitalize;
271
+ font-weight: 700 !important;
272
+ letter-spacing: -1px;
273
+ }
274
+
275
+ /* Confidence Score Box */
276
+ #confidence-score {
277
+ height: 100%;
278
+ display: flex;
279
+ justify-content: center;
280
+ align-items: center;
281
+ }
282
+
283
+ .confidence-text {
284
+ color: #ffc857;
285
+ font-size: 3.5rem !important;
286
+ font-weight: 700 !important;
287
+ text-align: center;
288
+ margin: 0;
289
+ letter-spacing: -1px;
290
+ }
291
+
292
+ /* Container spacing */
293
+ .analysis-container {
294
+ padding: 0;
295
+ }
296
+
297
+ .final-analysis .row {
298
+ margin: 0 -10px;
299
+ }
300
+
301
+ .final-analysis .col-md-6 {
302
+ padding: 10px;
303
+ }
304
+
305
+ /* Remove numbered circles from final analysis */
306
+ .final-analysis h3:before {
307
+ display: none;
308
+ }
309
+
310
+ @keyframes emojiChange {
311
+ 0% {
312
+ content: "πŸ˜ƒ";
313
+ transform: rotate(0deg);
314
+ }
315
+ 25% {
316
+ content: "😎";
317
+ transform: rotate(90deg);
318
+ }
319
+ 50% {
320
+ content: "πŸ€ͺ";
321
+ transform: rotate(180deg);
322
+ }
323
+ 75% {
324
+ content: "πŸ₯³";
325
+ transform: rotate(270deg);
326
+ }
327
+ 100% {
328
+ content: "πŸ˜ƒ";
329
+ transform: rotate(360deg);
330
+ }
331
+ }
332
+
333
+ .emoji-text {
334
+ position: relative;
335
+ display: inline-block;
336
+ }
337
+
338
+ .emoji-text::after {
339
+ content: "πŸ˜ƒ";
340
+ position: absolute;
341
+ animation: emojiChange 4s infinite linear;
342
+ display: inline-block;
343
+ transform-origin: center center;
344
+ width: 1em;
345
+ height: 1em;
346
+ left: 0;
347
+ top: 0;
348
+ }
349
+