Narsil committed
Commit 0844593 · 1 Parent(s): 5591858

Create README

def DummyTok(model_max_length=4):
    import tempfile

    from tokenizers import Tokenizer, models
    from transformers.tokenization_utils_fast import PreTrainedTokenizerFast

    # Dummy vocab: the first 256 Unicode codepoints, using the codepoint as the score.
    vocab = [(chr(i), i) for i in range(256)]
    tokenizer = Tokenizer(models.Unigram(vocab))

    # Round-trip through a temporary file so the raw tokenizer can be
    # reloaded as a `PreTrainedTokenizerFast`.
    with tempfile.NamedTemporaryFile() as f:
        tokenizer.save(f.name)
        real_tokenizer = PreTrainedTokenizerFast(
            tokenizer_file=f.name, model_max_length=model_max_length
        )

    real_tokenizer.save("dummy_tokenizer.json")
    return real_tokenizer
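
For reference, a minimal loading sketch, assuming a dummy_tokenizer.json file written by the snippet above is available in the working directory: it reloads the file with PreTrainedTokenizerFast and encodes a short string.

from transformers.tokenization_utils_fast import PreTrainedTokenizerFast

# Reload the dummy tokenizer file produced above (the path is an assumption).
tok = PreTrainedTokenizerFast(
    tokenizer_file="dummy_tokenizer.json", model_max_length=4
)
print(tok("abc").input_ids)  # ids of the single-character tokens for "abc"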

Files changed (1)
  1. README.md +1 -0
README.md ADDED
@@ -0,0 +1 @@
+ 123