fenguhao committed

Commit b7826d6
1 Parent(s): 747c7bb

Upload tokenizer

added_tokens.json CHANGED
@@ -1,4 +1,3 @@
 {
-  "0": 1,
-  "1": 2
+  "1 ": 1
 }
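
For reference, added_tokens.json is the slow-tokenizer sidecar file mapping added token strings to their ids; after this commit it lists only the token "1 " (note the trailing space) at id 1. A minimal sketch of reading it, assuming the file sits in the current directory:

import json

# added_tokens.json maps added token strings to integer ids.
# After this commit it holds the single entry {"1 ": 1}.
with open("added_tokens.json") as f:
    added_tokens = json.load(f)

for content, token_id in added_tokens.items():
    print(repr(content), "->", token_id)  # repr() makes the trailing space visible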
special_tokens_map.json CHANGED
@@ -1,4 +1,4 @@
 {
-  "bos_token": "0",
-  "eos_token": "1"
+  "bos_token": "0 ",
+  "eos_token": "1 "
 }
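
special_tokens_map.json is the file transformers consults for the named special tokens; bos_token and eos_token now carry a trailing space to match the renamed added tokens. A quick check, with the repo id as a placeholder for wherever this tokenizer is published:

from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual Hub repository or a local path.
tok = AutoTokenizer.from_pretrained("fenguhao/tokenizer-repo")

# After this commit the special tokens include the trailing space.
print(repr(tok.bos_token))  # expected: '0 '
print(repr(tok.eos_token))  # expected: '1 '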
tokenizer.json CHANGED
@@ -4,8 +4,8 @@
   "padding": null,
   "added_tokens": [
     {
-      "id": 1,
-      "content": "0",
+      "id": 0,
+      "content": "0 ",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -13,8 +13,8 @@
       "special": true
     },
     {
-      "id": 2,
-      "content": "1",
+      "id": 1,
+      "content": "1 ",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
tokenizer_config.json CHANGED
@@ -1,16 +1,16 @@
 {
   "add_prefix_space": false,
   "added_tokens_decoder": {
-    "1": {
-      "content": "0",
+    "0": {
+      "content": "0 ",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "2": {
-      "content": "1",
+    "1": {
+      "content": "1 ",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -18,9 +18,9 @@
       "special": true
     }
   },
-  "bos_token": "0",
+  "bos_token": "0 ",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "1",
+  "eos_token": "1 ",
   "model_max_length": 1000000000000000019884624838656,
   "tokenizer_class": "GPT2Tokenizer",
   "unk_token": ""