hanifa-fy committed on
Commit 9bc5c8e · verified · 1 Parent(s): e6fcb4f

Upload processor

Files changed (3):
  1. preprocessor_config.json +10 -0
  2. tokenizer_config.json +11 -10
  3. vocab.json +7 -11
preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "do_normalize": true,
+   "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+   "feature_size": 1,
+   "padding_side": "right",
+   "padding_value": 0.0,
+   "processor_class": "Wav2Vec2Processor",
+   "return_attention_mask": true,
+   "sampling_rate": 16000
+ }
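
For context, a minimal sketch of how this new feature-extractor config is consumed with the transformers library, assuming a local copy of this repository (the "./checkpoint" path is illustrative, not part of the commit):

import numpy as np
from transformers import Wav2Vec2Processor

# Assumption: "./checkpoint" is a local clone containing the files from this commit.
processor = Wav2Vec2Processor.from_pretrained("./checkpoint")

# One second of silence at the 16 kHz rate declared in preprocessor_config.json.
audio = np.zeros(16000, dtype=np.float32)
inputs = processor(audio, sampling_rate=16000, return_tensors="pt")

# "return_attention_mask": true means an attention_mask is returned alongside input_values.
print(inputs.input_values.shape, inputs.attention_mask.shape)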
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
  {
    "added_tokens_decoder": {
-     "37": {
+     "31": {
        "content": "[UNK]",
        "lstrip": true,
        "normalized": false,
@@ -8,7 +8,7 @@
        "single_word": false,
        "special": false
      },
-     "38": {
+     "32": {
        "content": "[PAD]",
        "lstrip": true,
        "normalized": false,
@@ -16,21 +16,21 @@
        "single_word": false,
        "special": false
      },
-     "39": {
+     "33": {
        "content": "<s>",
-       "lstrip": false,
+       "lstrip": true,
        "normalized": false,
-       "rstrip": false,
+       "rstrip": true,
        "single_word": false,
-       "special": true
+       "special": false
      },
-     "40": {
+     "34": {
        "content": "</s>",
-       "lstrip": false,
+       "lstrip": true,
        "normalized": false,
-       "rstrip": false,
+       "rstrip": true,
        "single_word": false,
-       "special": true
+       "special": false
      }
    },
    "bos_token": "<s>",
@@ -39,6 +39,7 @@
    "eos_token": "</s>",
    "model_max_length": 1000000000000000019884624838656,
    "pad_token": "[PAD]",
+   "processor_class": "Wav2Vec2Processor",
    "replace_word_delimiter_char": " ",
    "target_lang": null,
    "tokenizer_class": "Wav2Vec2CTCTokenizer",
vocab.json CHANGED
@@ -1,7 +1,9 @@
  {
    "'": 1,
-   "[PAD]": 38,
-   "[UNK]": 37,
+   "</s>": 34,
+   "<s>": 33,
+   "[PAD]": 32,
+   "[UNK]": 31,
    "a": 2,
    "b": 3,
    "c": 4,
@@ -29,13 +31,7 @@
    "y": 26,
    "z": 27,
    "|": 0,
-   "à": 28,
-   "ô": 29,
-   "ú": 30,
-   "đ": 31,
-   "ạ": 32,
-   "ả": 33,
-   "ậ": 34,
-   "ắ": 35,
-   "ồ": 36
+   "ù": 28,
+   "": 29,
+   "": 30
  }
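
Each key in vocab.json is a single character (or special token) mapped to a CTC label id; "|" (id 0) is the word delimiter, which replace_word_delimiter_char turns back into a space on decode. A small usage sketch, assuming the transformers library and a local "./vocab.json" copied from this commit (the path is illustrative):

from transformers import Wav2Vec2CTCTokenizer

# Assumption: "./vocab.json" is the file from this commit, saved locally.
tokenizer = Wav2Vec2CTCTokenizer(
    "./vocab.json",
    unk_token="[UNK]",
    pad_token="[PAD]",
    word_delimiter_token="|",
)

# Characters present in the vocab map to their ids; anything else falls back to [UNK] (31).
ids = tokenizer("audio test").input_ids
print(ids)

# Decoding maps ids back to characters and turns "|" back into spaces.
print(tokenizer.decode(ids))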