from transformers import AutoTokenizer
from optimum.onnxruntime import ORTModelForTokenClassification

# Load the tokenizer and the quantized ONNX token-classification model
tokenizer = AutoTokenizer.from_pretrained("typeof/distilbert-base-cased-finetuned-conll03-english-quantized")
model = ORTModelForTokenClassification.from_pretrained("typeof/distilbert-base-cased-finetuned-conll03-english-quantized")
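A minimal inference sketch, not taken from the model card: optimum ORT models can generally be passed to the standard transformers pipeline, so named-entity predictions can be read off directly. The example sentence is illustrative.

from transformers import pipeline

# Wrap the ONNX model in a transformers NER pipeline; aggregation merges word-piece tokens into entity spans
ner = pipeline("token-classification", model=model, tokenizer=tokenizer, aggregation_strategy="simple")

# Illustrative input; prints entity label, surface text, and confidence score
for entity in ner("Hugging Face is based in New York City."):
    print(entity["entity_group"], entity["word"], round(entity["score"], 3))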