Added chat template.
#3 by v2ray · opened

tokenizer_config.json  +3 -2
@@ -39,5 +39,6 @@
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
-  "use_default_system_prompt":
-}
+  "use_default_system_prompt": true,
+  "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{{ messages[0]['content'] }}{% else %}{% set loop_messages = messages %}{{ 'A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user\\'s questions.' }}{% endif %}{% for message in loop_messages %}{% if loop.index0 == 0 %}{% if message['role'] == 'system' or message['role'] == 'user' %}{{ ' USER: ' + message['content'] }}{% else %}{{ ' ASSISTANT: ' + message['content'] + eos_token }}{% endif %}{% else %}{% if message['role'] == 'system' or message['role'] == 'user' %}{{ '\nUSER: ' + message['content'] }}{% else %}{{ ' ASSISTANT: ' + message['content'] + eos_token }}{% endif %}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ ' ASSISTANT:' }}{% endif %}"
+}
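
With the chat_template in tokenizer_config.json, transformers can render the Vicuna-style prompt directly from the message list. A minimal usage sketch (not part of this diff; the repo id below is a placeholder, substitute this model's actual path):

# Sketch of applying the newly added chat template via transformers.
from transformers import AutoTokenizer

# Placeholder repo id; replace with the actual model path for this repository.
tokenizer = AutoTokenizer.from_pretrained("path/to/this-model")

messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "Hello!"},
]

# Renders the template: the system message is emitted first, then
# " USER: Hello!", and add_generation_prompt appends " ASSISTANT:".
prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
)
print(prompt)
# -> "You are a concise assistant. USER: Hello! ASSISTANT:"

If no system message is supplied, the template falls back to the default system prompt embedded in it ("A chat between a curious user and an artificial intelligence assistant. ..."), matching use_default_system_prompt: true.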