llama-2-7b-chat-merged-with-llama-2-7b-chat-12layers-T6-25000steps-lora612-hhrlhf/special_tokens_map.json
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}