TinyPixel committed
Commit 1371751 · 1 Parent(s): 1caf855

Upload folder using huggingface_hub

Files changed (3):
  1. special_tokens_map.json  +3 -22
  2. tokenizer.json           +0 -0
  3. tokenizer_config.json    +10 -31
special_tokens_map.json CHANGED
@@ -1,24 +1,5 @@
 {
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "</s>",
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "bos_token": "<|endoftext|>",
+  "eos_token": "<|endoftext|>",
+  "unk_token": "<|endoftext|>"
 }
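
Net effect: the Llama-style "<s>"/"</s>"/"<unk>" specials (and the "pad_token") are replaced by GPT-2's single "<|endoftext|>" token. A minimal sketch of how to verify the new mapping after pulling this commit, assuming a local checkout of the repo at ./model_dir (a placeholder path, not part of this commit):

    from transformers import AutoTokenizer

    # Load the tokenizer from a local checkout of this repo.
    tok = AutoTokenizer.from_pretrained("./model_dir")

    # After this commit, bos/eos/unk all map to the same GPT-2 special token.
    print(tok.special_tokens_map)
    # {'bos_token': '<|endoftext|>', 'eos_token': '<|endoftext|>', 'unk_token': '<|endoftext|>'}

Note that the old "pad_token": "</s>" entry is dropped rather than remapped, so downstream code that pads batches will need to set one explicitly (e.g. tok.pad_token = tok.eos_token).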
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
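
tokenizer.json is the serialized "fast" tokenizer that backs the two small config files in this commit. Since the diff itself cannot be rendered, a hedged sketch of inspecting the file directly with the tokenizers library:

    from tokenizers import Tokenizer

    # Load the raw fast-tokenizer file and confirm it carries the GPT-2
    # vocabulary that the updated configs point at.
    fast = Tokenizer.from_file("tokenizer.json")
    print(fast.token_to_id("<|endoftext|>"))  # 50256 in the standard GPT-2 vocab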
 
tokenizer_config.json CHANGED
@@ -1,40 +1,19 @@
 {
+  "add_prefix_space": false,
   "added_tokens_decoder": {
-    "0": {
-      "content": "<unk>",
+    "50256": {
+      "content": "<|endoftext|>",
       "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     }
   },
-  "bos_token": "<s>",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "</s>",
-  "legacy": false,
-  "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "</s>",
-  "padding_side": "right",
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
-  "unk_token": "<unk>",
-  "use_default_system_prompt": true
+  "bos_token": "<|endoftext|>",
+  "clean_up_tokenization_spaces": true,
+  "eos_token": "<|endoftext|>",
+  "model_max_length": 1024,
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": "<|endoftext|>"
 }
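
The config now describes a GPT2Tokenizer: a single special token at id 50256, a real model_max_length of 1024 (the removed 1000000000000000019884624838656 is transformers' int(1e30) sentinel for "no maximum set"), and the Llama-specific keys (legacy, sp_model_kwargs, padding_side, spaces_between_special_tokens, use_default_system_prompt) dropped. A short sanity-check sketch, again assuming a local checkout at the placeholder path ./model_dir:

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("./model_dir")

    # Values written by this commit:
    assert tok.model_max_length == 1024
    assert tok.eos_token == "<|endoftext|>"
    assert tok.convert_tokens_to_ids("<|endoftext|>") == 50256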