add tokenizer
- special_tokens_map.json +1 -0
- tokenizer_config.json +1 -0
- vocab.json +1 -0
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
+{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}
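
The four entries above are what transformers exposes as the tokenizer's special tokens. A minimal sketch of loading and inspecting them, assuming the three files in this commit sit in a hypothetical local directory ./tokenizer-dir:

```python
from transformers import Wav2Vec2CTCTokenizer

# "./tokenizer-dir" is a hypothetical directory holding the vocab.json,
# tokenizer_config.json, and special_tokens_map.json from this commit.
tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./tokenizer-dir")

print(tokenizer.special_tokens_map)
# {'bos_token': '<s>', 'eos_token': '</s>', 'unk_token': '<unk>', 'pad_token': '<pad>'}
```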
tokenizer_config.json
ADDED
@@ -0,0 +1 @@
+{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "<pad>", "do_lower_case": false, "word_delimiter_token": "|", "replace_word_delimiter_char": " ", "tokenizer_class": "Wav2Vec2CTCTokenizer"}
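
Here tokenizer_class pins the files to Wav2Vec2CTCTokenizer, while word_delimiter_token and replace_word_delimiter_char control how spaces round-trip through the character vocabulary. A minimal sketch of that round trip, assuming the same hypothetical ./tokenizer-dir as above:

```python
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./tokenizer-dir")

# On encode, each space becomes the word delimiter "|" (id 9 in vocab.json below).
ids = tokenizer("a cat sat").input_ids
print(ids)                    # [0, 9, 21, 0, 22, 9, 27, 0, 22]

# On decode, "|" is replaced back with replace_word_delimiter_char (" ").
print(tokenizer.decode(ids))  # a cat sat
```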
vocab.json
ADDED
@@ -0,0 +1 @@
+{"a": 0, "'": 1, "z": 2, "o": 3, "d": 5, "v": 6, "k": 7, "f": 8, "i": 10, "w": 12, "u": 13, "p": 14, "y": 15, "j": 16, "m": 17, "e": 18, "q": 19, "r": 20, "c": 21, "t": 22, "n": 23, "b": 24, "x": 25, "l": 26, "s": 27, "g": 28, "h": 29, "|": 9, "<unk>": 28, "<pad>": 29}
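
For reference, a character vocabulary like this is typically derived from the training transcripts in the usual wav2vec2 fine-tuning recipe: collect the character set, remap " " to "|", then append <unk> and <pad>. A minimal sketch with hypothetical transcripts (the committed file's id assignment differs, so this only illustrates the shape of the process):

```python
import json

# Hypothetical transcripts standing in for the real training text.
corpus = ["a cat sat", "on the mat"]

# Assign an id to every character that occurs in the corpus.
vocab = {ch: i for i, ch in enumerate(sorted(set("".join(corpus))))}

# Re-map the space character to the CTC word delimiter "|".
vocab["|"] = vocab.pop(" ")

# Append the special tokens at fresh ids.
vocab["<unk>"] = len(vocab)
vocab["<pad>"] = len(vocab)

with open("vocab.json", "w") as f:
    json.dump(vocab, f)
```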