kevinzhang120 committed
Commit 39eb768 · verified · 1 Parent(s): e2c8e5a

Upload tokenizer

special_tokens_map.json CHANGED
@@ -23,7 +23,6 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "</s>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer.model CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:45ccb9c8b6b561889acea59191d66986d314e7cbd6a78abc6e49b139ca91c1e6
-size 500058
+oid sha256:5441161d140f285cab44391cd047f31b4fa04e6ed3409996f055d95de80da70c
+size 131
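
The tokenizer.model diff above only shows the Git LFS pointer fields (version, oid, size), not the binary contents. As a rough sketch, assuming the file has already been downloaded locally, its SHA-256 digest can be checked against the oid recorded in the new pointer; the local path here is a placeholder, not something taken from this commit:

# Minimal verification sketch (assumption, not part of this commit):
# compare a local tokenizer.model against the "oid sha256:..." from the LFS pointer.
import hashlib

expected = "5441161d140f285cab44391cd047f31b4fa04e6ed3409996f055d95de80da70c"

with open("tokenizer.model", "rb") as f:  # placeholder path
    digest = hashlib.sha256(f.read()).hexdigest()

print("match" if digest == expected else "mismatch", digest)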
tokenizer_config.json CHANGED
@@ -77,8 +77,7 @@
   "legacy": null,
   "middle_token": "▁<MID>",
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "</s>",
-  "padding_side": "right",
+  "pad_token": null,
   "prefix_token": "▁<PRE>",
   "sp_model_kwargs": {},
   "suffix_token": "▁<SUF>",