new5558 committed
Commit b2f0905 · 1 Parent(s): 56184bf

Upload tokenizer

Files changed (2)
  1. tokenizer.json +7 -5
  2. tokenizer_config.json +1 -1
tokenizer.json CHANGED
@@ -1,12 +1,14 @@
 {
   "version": "1.0",
-  "truncation": {
+  "truncation": null,
+  "padding": {
+    "strategy": "BatchLongest",
     "direction": "Right",
-    "max_length": 512,
-    "strategy": "LongestFirst",
-    "stride": 0
+    "pad_to_multiple_of": null,
+    "pad_id": 1,
+    "pad_type_id": 0,
+    "pad_token": "<pad>"
   },
-  "padding": null,
   "added_tokens": [
     {
       "id": 0,
tokenizer_config.json CHANGED
@@ -5,6 +5,6 @@
   "mask_token": "<mask>",
   "model_max_length": 512,
   "pad_token": "<pad>",
-  "special_tokens_map_file": "/root/.cache/huggingface/hub/models--new5558--HoogBERTa/snapshots/13b06c762e27014b3852cd8bde7626c98ebdf4e8/special_tokens_map.json",
+  "special_tokens_map_file": "/root/.cache/huggingface/hub/models--new5558--HoogBERTa/snapshots/76ab6e47ce08d0ad7ad7ba2bf0f5d0aa1e86ddb6/special_tokens_map.json",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
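After this commit, the padded, untruncated configuration is picked up when the fast tokenizer is loaded from the Hub. A sketch assuming the repo id new5558/HoogBERTa, inferred from the cached snapshot path above:

from transformers import AutoTokenizer

# Repo id inferred from the cache path "models--new5558--HoogBERTa"; adjust if it differs.
tokenizer = AutoTokenizer.from_pretrained("new5558/HoogBERTa")

# pad_token comes from tokenizer_config.json; padding=True pads to the longest sequence in the batch.
print(tokenizer.pad_token)  # "<pad>"
batch = tokenizer(["first example", "a longer second example"], padding=True)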