"""Download the deepset/tinyroberta-squad2 checkpoint and save a local copy.

Fetches the tokenizer and extractive-QA model from the Hugging Face Hub,
then writes both to a local directory so later runs can load from disk
instead of re-downloading.
"""
from transformers import AutoModelForQuestionAnswering, AutoTokenizer

MODEL_NAME = "deepset/tinyroberta-squad2"
# NOTE(review): the source checkpoint is "tinyroberta-squad2" but this save
# directory drops the trailing "2" — confirm the path is intentional.
SAVE_DIR = "./models/deepset/tinyroberta-squad"


def main() -> None:
    """Fetch the checkpoint and persist model + tokenizer to SAVE_DIR."""
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    model = AutoModelForQuestionAnswering.from_pretrained(MODEL_NAME)

    # save_pretrained writes the checkpoint directly; the original built a
    # Trainer (pulling in training machinery and a torch import) solely to
    # call trainer.save_model, which is unnecessary for saving.
    model.save_pretrained(SAVE_DIR)
    tokenizer.save_pretrained(SAVE_DIR)
    print("successfully saved model")


if __name__ == "__main__":
    main()