ProjectNoob committed
Commit 18c7e5b
1 Parent(s): bb19566

Upload tokenizer

special_tokens_map.json CHANGED
@@ -13,7 +13,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<unk>",
+  "pad_token": "</s>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 512,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
+  "truncation": null,
   "padding": null,
   "added_tokens": [
     {
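
Setting "truncation": null removes the 512-token truncation that was baked into the serialized fast tokenizer, so encoding no longer caps the sequence length unless truncation is enabled explicitly. A minimal sketch using the tokenizers library directly, assuming tokenizer.json sits in the current directory:

from tokenizers import Tokenizer

# Load the serialized fast tokenizer; the path is illustrative.
tok = Tokenizer.from_file("tokenizer.json")

# With "truncation": null, encoding does not cap the sequence length.
print(len(tok.encode("some very long text " * 200).ids))

# Truncation can still be enabled explicitly when needed:
tok.enable_truncation(max_length=512)
# ...or turned off again:
tok.no_truncation()
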
tokenizer_config.json CHANGED
@@ -32,7 +32,7 @@
   "eos_token": "</s>",
   "legacy": false,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<unk>",
+  "pad_token": "</s>",
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",