zainulhakim committed (verified)
Commit 178988f · Parent(s): 9b130bd

Upload tokenizer

Files changed (4):
1. README.md +3 -3
2. special_tokens_map.json +4 -28
3. tokenizer_config.json +0 -1
4. vocab.json +16 -16
README.md CHANGED
```diff
@@ -1,10 +1,10 @@
 ---
-license: apache-2.0
 base_model: zainulhakim/241103_wav2vec2_Augmented_Dataset
-tags:
-- generated_from_trainer
+license: apache-2.0
 metrics:
 - wer
+tags:
+- generated_from_trainer
 model-index:
 - name: augmented_indians_dataset_client2
   results: []
```
special_tokens_map.json CHANGED
```diff
@@ -1,30 +1,6 @@
 {
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "[PAD]",
-    "lstrip": true,
-    "normalized": false,
-    "rstrip": true,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "[UNK]",
-    "lstrip": true,
-    "normalized": false,
-    "rstrip": true,
-    "single_word": false
-  }
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "[PAD]",
+  "unk_token": "[UNK]"
 }
```
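
The flattened map drops the per-token `lstrip`/`rstrip` flags that the old `[PAD]`/`[UNK]` entries carried. A minimal sketch for checking what actually loads after this commit; the repo id `zainulhakim/augmented_indians_dataset_client2` is an assumption inferred from the model-index name above:

```python
from transformers import Wav2Vec2CTCTokenizer

# Repo id is an assumption inferred from the model-index name; adjust if needed.
tok = Wav2Vec2CTCTokenizer.from_pretrained(
    "zainulhakim/augmented_indians_dataset_client2",
    revision="178988f",  # pin to this commit
)

# With plain-string entries, the special tokens load with default behavior
# (no lstrip/rstrip whitespace stripping around [PAD] and [UNK]).
print(tok.bos_token, tok.eos_token, tok.pad_token, tok.unk_token)
```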
tokenizer_config.json CHANGED
```diff
@@ -39,7 +39,6 @@
   "eos_token": "</s>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "[PAD]",
-  "processor_class": "Wav2Vec2Processor",
   "replace_word_delimiter_char": " ",
   "target_lang": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",
```
vocab.json CHANGED
```diff
@@ -1,22 +1,22 @@
 {
   "[PAD]": 19,
   "[UNK]": 18,
-  "a": 13,
-  "c": 1,
-  "d": 10,
-  "e": 16,
-  "f": 15,
-  "h": 4,
+  "a": 7,
+  "c": 0,
+  "d": 17,
+  "e": 6,
+  "f": 13,
+  "h": 14,
   "i": 11,
-  "l": 8,
+  "l": 12,
   "m": 3,
-  "n": 5,
-  "o": 9,
-  "p": 6,
-  "r": 7,
-  "s": 2,
-  "t": 17,
-  "u": 12,
-  "x": 14,
-  "|": 0
+  "n": 9,
+  "o": 2,
+  "p": 5,
+  "r": 8,
+  "s": 1,
+  "t": 10,
+  "u": 16,
+  "x": 15,
+  "|": 4
 }
```
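
Only the character ids are reshuffled here: `[PAD]` stays at 19 and `[UNK]` at 18, so the vocabulary size is unchanged at 20. Because a wav2vec2 CTC head emits one logit per vocab id, a remap like this only decodes correctly against a checkpoint trained with the same ordering. A small sketch for sanity-checking the uploaded vocab, under the same repo-id assumption as above:

```python
import json
from huggingface_hub import hf_hub_download

# Repo id is an assumption inferred from the model-index name above.
path = hf_hub_download(
    "zainulhakim/augmented_indians_dataset_client2",
    "vocab.json",
    revision="178988f",  # pin to this commit
)
with open(path) as f:
    vocab = json.load(f)

assert len(vocab) == 20                              # 18 symbols + [PAD] + [UNK]
assert vocab["[PAD]"] == 19 and vocab["[UNK]"] == 18
print(sorted(vocab, key=vocab.get))                  # symbols in id order
```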