Text Generation · Transformers · Safetensors · English · stablelm · conversational · Inference Endpoints
Commit e094dae (1 parent: ffc21f7)
Committed by euclaise

Upload tokenizer

Files changed (2):
  1. tokenizer.json +25 -4
  2. tokenizer_config.json +4 -0
tokenizer.json CHANGED
@@ -239,10 +239,30 @@
     "use_regex": true
   },
   "post_processor": {
-    "type": "ByteLevel",
-    "add_prefix_space": false,
-    "trim_offsets": true,
-    "use_regex": true
+    "type": "TemplateProcessing",
+    "single": [
+      {
+        "Sequence": {
+          "id": "A",
+          "type_id": 0
+        }
+      }
+    ],
+    "pair": [
+      {
+        "Sequence": {
+          "id": "A",
+          "type_id": 0
+        }
+      },
+      {
+        "Sequence": {
+          "id": "B",
+          "type_id": 1
+        }
+      }
+    ],
+    "special_tokens": {}
   },
   "decoder": {
     "type": "ByteLevel",
@@ -258,6 +278,7 @@
     "end_of_word_suffix": null,
     "fuse_unk": false,
     "byte_fallback": false,
+    "ignore_merges": false,
     "vocab": {
       "<|endoftext|>": 0,
       "<|padding|>": 1,
tokenizer_config.json CHANGED
@@ -1,4 +1,6 @@
 {
+  "add_bos_token": false,
+  "add_eos_token": false,
   "add_prefix_space": false,
   "added_tokens_decoder": {
     "0": {
@@ -203,9 +205,11 @@
     }
   },
   "bos_token": "<|endoftext|>",
+  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|' + message['role'] + '|>\n' + message['content'] + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
   "model_max_length": 1000000000000000019884624838656,
+  "pad_token": null,
   "tokenizer_class": "GPTNeoXTokenizer",
   "unk_token": "<|endoftext|>"
 }
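The new chat_template renders each message as <|role|> on its own line followed by the message content, and with add_generation_prompt=True the prompt ends with an opening <|assistant|> tag. A minimal usage sketch with transformers; the repo id below is a placeholder for illustration, not the actual model path for this commit:

# Minimal sketch of applying the new chat template (repo id is a placeholder).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("euclaise/your-model")  # hypothetical repo id
messages = [{"role": "user", "content": "Write a haiku about tokenizers."}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# <|user|>
# Write a haiku about tokenizers.
# <|assistant|>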