import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load the trained model and tokenizer from the Hugging Face Hub
model_path = 'viv/AIKIA'
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForSequenceClassification.from_pretrained(model_path)

# Preprocessing function for Greek text
def preprocessing_greek(text):
    # Placeholder preprocessing: currently only lowercases the input
    text = text.lower()
    return text

# Prediction function: returns the predicted label and the class probabilities
def predict(sentence):
    model.eval()
    preprocessed_sentence = preprocessing_greek(sentence)
    # Truncate over-length inputs so the model's maximum sequence length is not exceeded
    inputs = tokenizer(preprocessed_sentence, return_tensors="pt", truncation=True)
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    probabilities = torch.nn.functional.softmax(logits, dim=1)
    predicted_label = torch.argmax(probabilities, dim=1).item()
    labels_map = {0: 'NOT', 1: 'OFFENSIVE'}
    return labels_map[predicted_label], probabilities.tolist()

# Gradio interface: free-text input, predicted label plus probability list as output
iface = gr.Interface(fn=predict, inputs="text", outputs=["text", "json"])
iface.launch()
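
# Optional sanity check (a minimal sketch, not part of the original app):
# predict() can be called directly before launching the UI, or from a separate
# script that imports it. The Greek sample sentence below is illustrative only
# and is not taken from the AIKIA data; the printed probabilities depend on the model.
#
#   label, probs = predict("Αυτό είναι ένα παράδειγμα σχολίου.")
#   print(label, probs)  # e.g. ('NOT', [[0.92, 0.08]]) for a non-offensive input
#
# Assuming the Space is running and exposes the default endpoint, it can also be
# queried remotely with gradio_client:
#
#   from gradio_client import Client
#   client = Client("viv/AIKIA")
#   print(client.predict("παράδειγμα κειμένου", api_name="/predict"))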