import streamlit as st
import pandas as pd
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load the fine-tuned toxicity classifier and its tokenizer once at startup.
tokenizer = AutoTokenizer.from_pretrained("APJ23/MultiHeaded_Sentiment_Analysis_Model")
model = AutoModelForSequenceClassification.from_pretrained("APJ23/MultiHeaded_Sentiment_Analysis_Model")
# Human-readable labels for the model's output classes.
classes = {
    0: 'Non-Toxic',
    1: 'Toxic',
    2: 'Severely Toxic',
    3: 'Obscene',
    4: 'Threat',
    5: 'Insult',
    6: 'Identity Hate'
}
@st.cache_data
def prediction(tweet, _model, _tokenizer):
    # Leading underscores tell Streamlit not to hash the model and tokenizer
    # (they are unhashable and never change between reruns), avoiding the caching
    # error raised by the deprecated st.cache(allow_output_mutation=True) decorator.
    inputs = _tokenizer(tweet, return_tensors="pt", padding=True, truncation=True)
    with torch.no_grad():
        outputs = _model(**inputs)
    predicted_class = torch.argmax(outputs.logits, dim=1).item()
    predicted_prob = torch.softmax(outputs.logits, dim=1)[0][predicted_class].item()
    return classes[predicted_class], predicted_prob
def create_table(predictions):
    # Flatten the {tweet: (label, probability)} mapping into a DataFrame for display.
    data = {'Tweet': [], 'Highest Toxicity Class': [], 'Probability': []}
    for tweet_text, (label, prob) in predictions.items():
        data['Tweet'].append(tweet_text)
        data['Highest Toxicity Class'].append(label)
        data['Probability'].append(prob)
    return pd.DataFrame(data)
st.title('Toxicity Prediction App')
tweet = st.text_input('Enter a tweet to check for toxicity')

if st.button('Predict'):
    if not tweet.strip():
        st.warning('Please enter a tweet to classify.')
    else:
        predicted_class_label, predicted_prob = prediction(tweet, model, tokenizer)
        prediction_text = f'Prediction: {predicted_class_label} ({predicted_prob:.2f})'
        st.write(prediction_text)
        predictions = {tweet: (predicted_class_label, predicted_prob)}
        table = create_table(predictions)
        st.table(table)
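
For a quick check of the same inference path outside the Streamlit UI, a minimal standalone sketch is shown below. It assumes network access to download the APJ23/MultiHeaded_Sentiment_Analysis_Model checkpoint, and the sample tweet is only an illustration.

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("APJ23/MultiHeaded_Sentiment_Analysis_Model")
model = AutoModelForSequenceClassification.from_pretrained("APJ23/MultiHeaded_Sentiment_Analysis_Model")

# Tokenize a sample tweet and run a forward pass without tracking gradients.
inputs = tokenizer("Have a great day!", return_tensors="pt", padding=True, truncation=True)
with torch.no_grad():
    logits = model(**inputs).logits

# Same post-processing as the app: argmax over classes plus its softmax probability.
probs = torch.softmax(logits, dim=1)[0]
predicted_class = int(torch.argmax(probs))
print(predicted_class, float(probs[predicted_class]))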