# finewebedu_32000/utils.py
import pprint

def sort_by_token(tokenizer):
    # Print the vocabulary ordered by token length (shortest tokens first)
    vocab = tokenizer.get_vocab()
    sorted_vocab = dict(sorted(vocab.items(), key=lambda item: len(item[0])))
    pprint.pprint(sorted_vocab, sort_dicts=False)

def sort_by_id(tokenizer):
    # Print the vocabulary ordered by token id (ascending)
    vocab = tokenizer.get_vocab()
    sorted_vocab = dict(sorted(vocab.items(), key=lambda item: item[1]))
    pprint.pprint(sorted_vocab, sort_dicts=False)
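
# Minimal usage sketch: both helpers expect a Hugging Face tokenizer that
# implements get_vocab(). The repo id below is an assumption based on this
# file's location and may need adjusting.
if __name__ == "__main__":
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("gvlassis/finewebedu_32000")
    sort_by_id(tokenizer)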