llama-2-7b-chat-merged-with-llama-2-7b-chat-12layers-T6-25000steps-lora612-hhrlhf
/
generation_config.json
{
  "bos_token_id": 1,
  "do_sample": true,
  "eos_token_id": 2,
  "max_length": 4096,
  "pad_token_id": 0,
  "temperature": 0.6,
  "top_p": 0.9,
  "transformers_version": "4.40.2"
}