cportoca committed (verified)
Commit 7573014 · 1 Parent(s): 2b5b3bd

Upload tokenizer

Files changed (2):
  1. special_tokens_map.json +2 -2
  2. tokenizer_config.json +10 -11
special_tokens_map.json CHANGED
@@ -14,14 +14,14 @@
     "single_word": false
   },
   "pad_token": {
-    "content": "[PAD]",
+    "content": "<PAD>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "unk_token": {
-    "content": "[UNK]",
+    "content": "<UNK>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer_config.json CHANGED
@@ -1,32 +1,32 @@
 {
   "add_prefix_space": false,
   "added_tokens_decoder": {
-    "1": {
-      "content": "<s>",
+    "0": {
+      "content": "<PAD>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "2": {
-      "content": "</s>",
+    "1": {
+      "content": "<s>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "10214": {
-      "content": "[UNK]",
+    "2": {
+      "content": "</s>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "10215": {
-      "content": "[PAD]",
+    "3": {
+      "content": "<UNK>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -39,8 +39,7 @@
   "eos_token": "</s>",
   "errors": "replace",
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "[PAD]",
-  "processor_class": "WhisperProcessor",
+  "pad_token": "<PAD>",
   "tokenizer_class": "WhisperTokenizer",
-  "unk_token": "[UNK]"
+  "unk_token": "<UNK>"
 }
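
The tokenizer_config.json change also renumbers added_tokens_decoder so that IDs 0 through 3 map to <PAD>, <s>, </s>, <UNK>, and drops the processor_class entry. A hedged sketch of verifying the new ID assignments, again using the placeholder path from the previous snippet:

from transformers import WhisperTokenizer

# Placeholder path; substitute the actual repo id or local directory.
tokenizer = WhisperTokenizer.from_pretrained("./tokenizer_dir")

# Expected mapping after this commit: 0 -> <PAD>, 1 -> <s>, 2 -> </s>, 3 -> <UNK>
for token in ["<PAD>", "<s>", "</s>", "<UNK>"]:
    print(token, tokenizer.convert_tokens_to_ids(token))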