Severian committed on
Commit 7e801aa
1 parent: 147dbbf

Upload tokenizer

special_tokens_map.json CHANGED
@@ -13,13 +13,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "<|pad|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "pad_token": "<|endoftext|>",
   "unk_token": {
     "content": "<|unk|>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
 {
   "version": "1.0",
-  "truncation": null,
-  "padding": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 8192,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 8192
+    },
+    "direction": "Left",
+    "pad_to_multiple_of": null,
+    "pad_id": 0,
+    "pad_type_id": 0,
+    "pad_token": "<|pad|>"
+  },
   "added_tokens": [
     {
       "id": 0,
tokenizer_config.json CHANGED
@@ -36,13 +36,19 @@
     }
   },
   "bos_token": "<|startoftext|>",
-  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|endoftext|>",
+  "max_length": 8192,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<|pad|>",
+  "pad_to_multiple_of": null,
+  "pad_token": "<|endoftext|>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "spaces_between_special_tokens": false,
+  "stride": 0,
   "tokenizer_class": "LlamaTokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<|unk|>",
   "use_default_system_prompt": false
 }
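
Besides the new padding/truncation defaults, two things change here: the ChatML chat_template is dropped from the config, and pad_token moves from <|pad|> to <|endoftext|>. Without a chat_template in the config, recent transformers releases have no template to fall back on, but the removed template can still be supplied explicitly. A sketch, lightly condensed from the deleted template (placeholder repo id, as above):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("Severian/your-model")  # placeholder id

    # The ChatML template removed by this commit, minus its
    # add_generation_prompt guard (apply_chat_template sets that variable).
    chatml = (
        "{% for message in messages %}"
        "{{'<|im_start|>' + message['role'] + '\n'"
        " + message['content'] + '<|im_end|>' + '\n'}}"
        "{% endfor %}"
        "{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"
    )

    text = tok.apply_chat_template(
        [{"role": "user", "content": "Hi"}],
        chat_template=chatml,
        tokenize=False,
        add_generation_prompt=True,
    )
    print(text)  # <|im_start|>user\nHi<|im_end|>\n<|im_start|>assistant\n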