import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load the fine-tuned emotion classifier and the matching base DistilBERT tokenizer
model_name = "macapa/emotion-classifier"
tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")
model = AutoModelForSequenceClassification.from_pretrained(model_name)

# Run inference on GPU when available, otherwise fall back to CPU
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
model.eval()

# Map output class indices to emotion names
labels = {0: 'sadness',
          1: 'joy',
          2: 'love',
          3: 'anger',
          4: 'fear',
          5: 'surprise'}

def predict(text):
    # Tokenize the input and move it to the model's device
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    inputs = inputs.to(device)
    # Disable gradient tracking for inference
    with torch.no_grad():
        outputs = model(**inputs)
    # Pick the highest-scoring class and map it to its emotion name
    prediction = torch.argmax(outputs.logits, dim=1)
    return labels[prediction.item()]
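
# Quick local sanity check (sketch, not part of the original app): call predict() directly
# before launching the interface; the example sentence is illustrative and the printed
# label depends on the model.
# print(predict("I can't stop smiling today"))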

# Create the Gradio interface
iface = gr.Interface(
    fn=predict,
    inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
    outputs="textbox",
    title="Emotion Classification",
    description="Enter some text and the model will predict the emotion.",
)

# Launch the interface
iface.launch()
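
# Optional variant (sketch, not part of the original app): return per-emotion probabilities
# instead of a single label and render them with gr.Label. The names predict_scores and
# iface_scores are illustrative; the block is left commented out so the file still runs the
# single-label app above.
#
# def predict_scores(text):
#     inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True).to(device)
#     with torch.no_grad():
#         logits = model(**inputs).logits
#     probs = torch.softmax(logits, dim=1).squeeze(0)
#     return {labels[i]: probs[i].item() for i in range(len(labels))}
#
# iface_scores = gr.Interface(
#     fn=predict_scores,
#     inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
#     outputs=gr.Label(num_top_classes=6),
#     title="Emotion Classification (per-class scores)",
# )
# iface_scores.launch()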