from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline
import torch
# Load the tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("jjzha/jobbert_knowledge_extraction")
model = AutoModelForTokenClassification.from_pretrained("jjzha/jobbert_knowledge_extraction")
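
The snippet imports pipeline but stops right after loading the model. A minimal sketch of how inference could continue, assuming a token-classification pipeline and a made-up example sentence (neither appears in the original file):

# Sketch only: the pipeline setup and example text below are assumptions,
# not part of the original snippet.
knowledge_pipe = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",  # merge sub-word tokens into whole spans
)

example = "We are looking for a data engineer with experience in Python and SQL."
for span in knowledge_pipe(example):
    # Each result is a dict with the predicted label group, confidence score, and matched text.
    print(span["entity_group"], round(span["score"], 3), span["word"])

With aggregation_strategy="simple", consecutive sub-word predictions are merged into whole spans, which is usually what you want when extracting knowledge or skill phrases from job postings.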