import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch

# Load the fine-tuned emotion classification model and its tokenizer
model_name = "ahmetyaylalioglu/text-emotion-classifier"
model = AutoModelForSequenceClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)


def predict_emotion(text):
    # Tokenize the input, truncating to the model's maximum sequence length
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)

    # Run inference without tracking gradients
    with torch.no_grad():
        outputs = model(**inputs)

    # Convert logits to probabilities and pick the most likely class
    probabilities = torch.nn.functional.softmax(outputs.logits, dim=-1)
    prediction = torch.argmax(probabilities, dim=-1).item()

    # Map the predicted class index to its label and report the confidence
    emotion = model.config.id2label[prediction]
    confidence = probabilities[0][prediction].item()

    return f"Emotion: {emotion}\nConfidence: {confidence:.2f}"


# Build the Gradio interface around the prediction function
iface = gr.Interface(
    fn=predict_emotion,
    inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
    outputs="text",
    title="Emotion Classifier",
    description="Enter some text and click 'Submit' to predict the emotion."
)

iface.launch()