# Initial test with BETO's corpus (commit 2835721, 317 bytes)
from transformers.modeling_flax_pytorch_utils import load_flax_checkpoint_in_pytorch_model
from transformers import RobertaConfig, RobertaModel

# Convert the Flax checkpoint in the current directory into a PyTorch
# checkpoint saved alongside it.
checkpoint_dir = "./"

# Build a PyTorch RoBERTa model from the config stored in this directory.
roberta_config = RobertaConfig.from_pretrained(checkpoint_dir)
pytorch_model = RobertaModel(roberta_config)

# Copy the Flax weights into the PyTorch model, then write it back out
# in PyTorch format.
load_flax_checkpoint_in_pytorch_model(pytorch_model, "./flax_model.msgpack")
pytorch_model.save_pretrained(checkpoint_dir)