from transformers import (
    AutoModelForSequenceClassification,  # For text emotion detection model
    pipeline,                           # For creating inference pipeline
    PreTrainedTokenizerFast            # For processing text input
)
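# Local helper module; capture_webcam() is expected to return an (emotion, confidence_score) tuple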
from WebCam import capture_webcam
from colorama import Fore, Style        # For colored console output
import pandas as pd                     # For data handling
import time
from datetime import datetime
import matplotlib.pyplot as plt
import gradio as gr

# Dictionaries to store emotion data over time
text_dataDict = {"Time": [], "Emotion": [], "Confidence Score": []}
face_dataDict = {"Time": [], "Emotion": [], "Confidence Score": []}

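# Load the pretrained emotion-classification model, its tokenizer, and build a text-classification pipeline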
emotionDetectModel = AutoModelForSequenceClassification.from_pretrained("borisn70/bert-43-multilabel-emotion-detection") #to be replaced with my fine-tuned model once it is ready
tokenizer = PreTrainedTokenizerFast(tokenizer_file="tokenizer.json")
pipe = pipeline(task="text-classification", model=emotionDetectModel, tokenizer=tokenizer)
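# pipe(text) returns a list of dicts like [{"label": <emotion>, "score": <confidence>}]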

localFormat = "%Y-%m-%d %H:%M:%S"  # timestamp format: year-month-day hours:minutes:seconds (24-hour clock)
#currTime = datetime.now().astimezone().strftime(localFormat)  # returns the current time in localFormat
#current_Time_Tuple = time.strptime(str(currTime), str(localFormat))  # creates a tuple holding each part of the local format separately
#current_Time_In_Seconds = time.mktime(current_Time_Tuple)  # converts the tuple into the number of seconds since the epoch

def emotionAnalysis(message):
    """
    Main function that processes both text and facial emotions
    Args:
        message (str): User input text
    Returns:
        tuple: (str, plt) Contains the emotion results text and the updated plot
    """
    if message.lower() == "quit":
        return "Quitting...", displayResults()
    
    # Process text emotion
    result = pipe(message)
    text_emotion = result[0]["label"]
    text_score = result[0]["score"]
    words_timestamp = datetime.now().astimezone().strftime(localFormat)
    
    # Store text emotion data for plotting
    text_dataDict["Time"].append(words_timestamp)
    text_dataDict["Emotion"].append(text_emotion)
    text_dataDict["Confidence Score"].append(round(text_score, 2))
    
    # Capture and process facial emotion
    face_emotion, face_score = capture_webcam()
    face_timestamp = datetime.now().astimezone().strftime(localFormat)
    
    # Store facial emotion data for plotting
    face_dataDict["Time"].append(face_timestamp)
    face_dataDict["Emotion"].append(face_emotion)
    face_dataDict["Confidence Score"].append(face_score)
    
    # Return both the text result and the updated plot
    return f"Text: {text_emotion} | Face: {face_emotion}", displayResults()

def displayResults():
    """
    Creates and returns a matplotlib plot showing emotion trends over time
    Returns:
        matplotlib.pyplot: Plot object showing emotion analysis results
    """
    # Create a new figure with specified size
    plt.figure(figsize=(10, 6))
    
    # Set up plot labels and title
    plt.title("Emotions Detected Through Facial Expressions and Text Over Time")
    plt.xlabel("Time")
    plt.ylabel("Emotions")

    #plot facial emotions versus time where time is on the x-axis
    plt.plot(face_dataDict["Time"], face_dataDict["Emotion"], marker='o', linestyle='-', label="Facial Emotions")

    #plot text emotions versus time where time is on the x-axis
    plt.plot(text_dataDict["Time"], text_dataDict["Emotion"], marker='o', linestyle='-', color='red', label="Text Emotions")

    #add the legend and format the x-axis labels
    plt.legend()
    plt.xticks(rotation=45)  # Rotate timestamps for better readability
    plt.tight_layout()       # Adjust layout to prevent label cutoff
    
    return plt

# Get user consent for webcam access
print(Fore.GREEN + "This program will analyze your text for emotions and use your webcam to detect your emotions from your face. Do you give consent? (yes/no): " + Style.RESET_ALL)
consent = input()

if consent.lower() == 'yes':
    # Create Gradio interface with both text output and plot visualization
    interface = gr.Interface(
        fn=emotionAnalysis,
        inputs=["text"],
        outputs=[
            gr.Text(label="Emotion Results"),     # Shows current emotion analysis
            gr.Plot(label="Emotion Timeline")     # Shows emotion trends over time
        ],
        title="Emotion Analysis from Text and Face",
        description="Enter text into the textbox. Then, press 'Submit' or 'Enter' to activate the webcam. Wait and see the results."
    )

    interface.launch()
else:
    print(Fore.RED + "Consent not given. Exiting the program." + Style.RESET_ALL)