import gradio as gr
import torch
# Define model paths
models_path = "./models"
tokenizer_path = f"{models_path}/el_new_tokenizer.pt"
lemmatizer_path = f"{models_path}/el_new_nocharlm_lemmatizer.pt"
tagger_path = f"{models_path}/el_new_transformer_tagger.pt"
parser_path = f"{models_path}/el_new_transformer_parser.pt"  # Dependency parser model
# Load models (full model objects serialized with torch.save)
# weights_only=False allows unpickling full model objects on newer PyTorch releases;
# the checkpoints are trusted files bundled with this Space.
tokenizer = torch.load(tokenizer_path, map_location="cpu", weights_only=False)  # Tokenizer
lemmatizer_model = torch.load(lemmatizer_path, map_location="cpu", weights_only=False)  # Lemmatizer
tagger_model = torch.load(tagger_path, map_location="cpu", weights_only=False)  # POS tagger
parser_model = torch.load(parser_path, map_location="cpu", weights_only=False)  # Dependency parser
# Prediction function
def predict(text):
    # Tokenize input
    inputs = tokenizer(text, return_tensors="pt")

    with torch.no_grad():
        # Perform lemmatization
        lemma_outputs = lemmatizer_model(**inputs)
        lemmas = lemma_outputs.logits.argmax(-1).tolist()  # Predicted lemma class indices

        # Perform POS tagging
        pos_outputs = tagger_model(**inputs)
        pos_tags = pos_outputs.logits.argmax(-1).tolist()  # Predicted POS tag indices

        # Perform dependency parsing
        dep_outputs = parser_model(**inputs)
        dep_parse = dep_outputs.logits.argmax(-1).tolist()  # Predicted dependency label indices

    # Return results as JSON-serializable lists
    return {
        "lemmas": lemmas,
        "pos_tags": pos_tags,
        "dep_parse": dep_parse,
    }
# Gradio interface
interface = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="json",
    title="Greek NLP Pipeline",
    description="Perform lemmatization, POS tagging, and dependency parsing for Greek text using custom models.",
)
# Launch interface
interface.launch()
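
# Usage note (illustrative, not executed): once the app is running, the same endpoint
# can be queried programmatically with gradio_client. A minimal sketch, assuming the
# default local launch address and the default "/predict" api_name that gr.Interface
# assigns to a single-function app:
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860")
#   result = client.predict("Καλημέρα σας!", api_name="/predict")
#   print(result)  # expected: dict with "lemmas", "pos_tags", and "dep_parse" lists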