from transformers import AutoTokenizer, LlamaForCausalLM, LlamaConfig, TrainingArguments, DataCollatorForLanguageModeling
import torch

# Load the tokenizer and model weights from the current directory.
tokenizer = AutoTokenizer.from_pretrained("./")
model = LlamaForCausalLM.from_pretrained("./")

# Serialize the full model object (not just its state dict) to pytorch_model.bin.
torch.save(model, "pytorch_model.bin")