beomi committed
Commit a645944
1 Parent(s): 69f670d

Update tokenizer_config.json

Files changed (1)
  1. tokenizer_config.json +1 -1
tokenizer_config.json CHANGED
@@ -94,5 +94,5 @@
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<unk>",
  "use_default_system_prompt": false,
- "chat_template": "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.\n\n{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '### Instruction:\n' + message['content'] }}\n{% elif message['role'] == 'assistant' %}\n{{ '### Response:\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '### Response:' }}\n{% endif %}\n{% endfor %}",
+ "chat_template": "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.\n\n{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '### Instruction:\n' + message['content'] }}\n{% elif message['role'] == 'assistant' %}\n{{ '### Response:\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '### Response:' }}\n{% endif %}\n{% endfor %}"
  }
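
The only change in this hunk is dropping the trailing comma after the chat_template value, which is the file's last key, so the JSON stays strictly parseable. The value itself is a Jinja chat template that transformers renders through tokenizer.apply_chat_template when building prompts. Below is a minimal standalone sketch that renders the same template string with jinja2 directly; the sandboxed environment with trim_blocks/lstrip_blocks mirrors how transformers compiles chat templates, and the example message and the "</s>" eos_token are illustrative assumptions, not values taken from this repository.

```python
from jinja2.sandbox import ImmutableSandboxedEnvironment

# The chat_template added in this commit, transcribed as a Python string
# (the \n escapes resolve to real newlines, just as they do when the JSON
# file is parsed).
CHAT_TEMPLATE = (
    "Below is an instruction that describes a task, paired with an input that "
    "provides further context. Write a response that appropriately completes "
    "the request.\n\n"
    "{% for message in messages %}\n"
    "{% if message['role'] == 'user' %}\n"
    "{{ '### Instruction:\n' + message['content'] }}\n"
    "{% elif message['role'] == 'assistant' %}\n"
    "{{ '### Response:\n' + message['content'] + eos_token }}\n"
    "{% endif %}\n"
    "{% if loop.last and add_generation_prompt %}\n"
    "{{ '### Response:' }}\n"
    "{% endif %}\n"
    "{% endfor %}"
)

# Sandboxed Jinja environment; trim_blocks/lstrip_blocks keep the {% ... %}
# tags from leaving stray blank lines in the rendered prompt.
env = ImmutableSandboxedEnvironment(trim_blocks=True, lstrip_blocks=True)

# Example conversation (illustrative only).
messages = [
    {"role": "user", "content": "Summarize the paragraph below in one sentence."},
]

# add_generation_prompt=True appends the trailing "### Response:" header so the
# model knows to start generating; "</s>" is the usual LlamaTokenizer eos_token
# and is assumed here.
prompt = env.from_string(CHAT_TEMPLATE).render(
    messages=messages,
    eos_token="</s>",
    add_generation_prompt=True,
)
print(prompt)
```

Running the sketch prints the Alpaca-style prompt this template produces: the fixed preamble, an "### Instruction:" block with the user message, and a trailing "### Response:" header left open for the model to complete.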