import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load the trained model and tokenizer
model_path = 'viv/AIKIA'  # Hub repo containing pytorch_model.bin and config.json
tokenizer = AutoTokenizer.from_pretrained("nlpaueb/bert-base-greek-uncased-v1")

# from_pretrained expects a repo id or a local directory rather than the .bin
# file itself; it resolves the weights and config.json from the repo automatically.
model = AutoModelForSequenceClassification.from_pretrained(model_path)

# Preprocessing function for Greek text
def preprocessing_greek(text):
    text = text.lower()  # Example step: convert to lowercase
    return text

# Prediction function
def predict(sentence):
    model.eval()
    preprocessed_sentence = preprocessing_greek(sentence)
    # Truncate so inputs longer than the model's maximum length do not raise errors
    inputs = tokenizer(preprocessed_sentence, return_tensors="pt", truncation=True)
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    probabilities = torch.nn.functional.softmax(logits, dim=1)
    predicted_label = torch.argmax(probabilities, dim=1).item()
    labels_map = {0: 'NOT', 1: 'OFFENSIVE'}
    return labels_map[predicted_label], probabilities.tolist()
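
# Optional quick sanity check before wiring up the interface (a minimal sketch;
# the sample sentence below is a placeholder for illustration, not from the app).
example_label, example_probs = predict("Αυτή είναι μια δοκιμαστική πρόταση.")
print(f"Sanity check: {example_label} {example_probs}")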

# Gradio Interface
iface = gr.Interface(fn=predict, inputs="text", outputs=["text", "json"])
iface.launch()
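
# Querying the running app from another process: a minimal client-side sketch,
# assuming the default local address and the auto-generated "/predict" endpoint
# name; the sample sentence is a placeholder. Run it from a separate script
# while the interface above is serving.
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860/")
#   label, probs = client.predict("Παράδειγμα πρότασης", api_name="/predict")
#   print(label, probs)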