bcsp committed
Commit 0df8bb8 · verified · 1 Parent(s): 99aa7ae

Upload tokenizer

Files changed (3)
  1. special_tokens_map.json +1 -0
  2. tokenizer_config.json +3 -0
  3. vocab.json +0 -0
special_tokens_map.json CHANGED
@@ -13,6 +13,7 @@
     "rstrip": false,
     "single_word": false
   },
+  "pad_token": "<|endoftext|>",
   "unk_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "add_bos_token": false,
   "add_prefix_space": false,
   "added_tokens_decoder": {
     "50256": {
@@ -317,7 +318,9 @@
   "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
+  "errors": "replace",
   "model_max_length": 2048,
+  "pad_token": "<|endoftext|>",
   "return_token_type_ids": false,
   "tokenizer_class": "CodeGenTokenizer",
   "unk_token": "<|endoftext|>"
vocab.json CHANGED
The diff for this file is too large to render. See raw diff