import gradio as gr
from gliner import GLiNER

# EmergentMethods/gliner_medium_news-v2.1 is a GLiNER zero-shot NER checkpoint,
# so it is loaded through the gliner package rather than transformers'
# AutoModelForTokenClassification, which cannot load it; GLiNER also handles
# its own tokenization, so no separate tokenizer object is needed.
model_name = "EmergentMethods/gliner_medium_news-v2.1"
model = GLiNER.from_pretrained(model_name)


def predict(text):
    # GLiNER is zero-shot: it extracts spans for whatever label names it is
    # given. This label set is only an illustrative default; adjust it freely.
    labels = ["person", "organization", "location", "date", "event"]

    # predict_entities returns one dict per detected span, with the span text,
    # its label, character offsets, and a confidence score.
    entities = model.predict_entities(text, labels, threshold=0.5)
    return entities
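
# Illustrative output shape (the sentence, spans, and scores below are made up;
# only the dict keys reflect what predict_entities returns):
#   predict("Tim Cook visited Berlin on Monday.")
#   -> [{"text": "Tim Cook", "label": "person", "start": 0, "end": 8, "score": 0.98},
#       {"text": "Berlin", "label": "location", "start": 17, "end": 23, "score": 0.97}, ...]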
demo = gr.Interface(fn=predict, inputs="text", outputs="json")
demo.launch()
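
# Minimal client-side sketch, assuming the default local URL and the default
# "/predict" endpoint exposed by gr.Interface; run it in a separate process
# while the demo above is serving:
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860/")
#   entities = client.predict("Tim Cook visited Berlin on Monday.", api_name="/predict")
#   print(entities)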