import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
from safetensors import SafetensorError

# Load the trained model and tokenizer
model_path = 'viv/AIKIA'  # Ensure this path is correct, either a local directory or a Hugging Face Hub repo id
tokenizer = AutoTokenizer.from_pretrained("nlpaueb/bert-base-greek-uncased-v1")

# Try loading the model from the safetensors weights, fall back to the .bin weights if that fails
try:
    model = AutoModelForSequenceClassification.from_pretrained(model_path)
except SafetensorError:
    print("Safetensors failed, trying to load the bin file.")
    model = AutoModelForSequenceClassification.from_pretrained(model_path, use_safetensors=False)

# Preprocessing function for Greek text
def preprocessing_greek(text):
    text = text.lower()  # Example step: convert to lowercase
    return text

# Prediction function
def predict(sentence):
    model.eval()
    preprocessed_sentence = preprocessing_greek(sentence)
    # Truncate so long inputs do not exceed BERT's maximum sequence length
    inputs = tokenizer(preprocessed_sentence, return_tensors="pt", truncation=True)
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    probabilities = torch.nn.functional.softmax(logits, dim=1)
    predicted_label = torch.argmax(probabilities, dim=1).item()
    labels_map = {0: 'NOT', 1: 'OFFENSIVE'}
    return labels_map[predicted_label], probabilities.tolist()

# Gradio Interface
iface = gr.Interface(fn=predict, inputs="text", outputs=["text", "json"])
iface.launch()
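
# A minimal sketch of querying the running app from a separate process with
# gradio_client. The "/predict" endpoint name is Gradio's default for a
# single-function Interface and the local URL is an assumption, not something
# confirmed by this Space.
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860")  # or the public Space URL
#   label, probabilities = client.predict("παράδειγμα κειμένου", api_name="/predict")
#   print(label, probabilities)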