mistral-sft_iter6 / args.json
{
"project": "iterative preference learning",
"exp_name": "mistral-sft_iter6",
"cache_dir": ".cache",
"result_dir": "results",
"data": null,
"prompt_max_length": 1024,
"max_length": 2048,
"model_name_or_path": ".cache/mistral-sft_iter5",
"ref_model_name_or_path": ".cache/mistral-sft_iter1/",
"beta": 0.1,
"n_epochs": 1,
"per_device_batch_size": 1,
"gradient_accumulation_steps": 32,
"lr": 5e-07,
"warmup_ratio": 0.03,
"max_grad_norm": 1,
"open_port": 46111
}
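
Below is a minimal sketch of how a DPO-style training script might consume this args.json. The mapping onto TRL's `DPOConfig` is an assumption for illustration only; the repository's actual training code is not shown here and may use its own trainer, and the `effective_batch` helper is purely illustrative.

```python
import json

from trl import DPOConfig  # assumes a recent TRL version with DPOConfig

# Load the hyperparameters exactly as stored in args.json above.
with open("args.json") as f:
    args = json.load(f)

# Effective batch size per optimizer step on one device:
# per_device_batch_size * gradient_accumulation_steps = 1 * 32 = 32.
effective_batch = args["per_device_batch_size"] * args["gradient_accumulation_steps"]
print(f"effective per-device batch size: {effective_batch}")

# Hypothetical mapping of the stored fields onto TRL's DPOConfig.
# Keyword names on the left are TRL's; values come straight from args.json.
config = DPOConfig(
    output_dir=args["result_dir"],
    beta=args["beta"],  # strength of the KL penalty against the reference model
    max_prompt_length=args["prompt_max_length"],
    max_length=args["max_length"],
    learning_rate=args["lr"],
    num_train_epochs=args["n_epochs"],
    per_device_train_batch_size=args["per_device_batch_size"],
    gradient_accumulation_steps=args["gradient_accumulation_steps"],
    warmup_ratio=args["warmup_ratio"],
    max_grad_norm=args["max_grad_norm"],
)

# The policy starts from the previous iteration's checkpoint while the reference
# model stays frozen at iteration 1, which is what makes the run "iterative":
# .cache/mistral-sft_iter5 vs. .cache/mistral-sft_iter1/.
policy_path = args["model_name_or_path"]
ref_path = args["ref_model_name_or_path"]
```

Note that `"data": null` leaves the preference dataset unspecified in this file, so the dataset path is presumably supplied elsewhere by the training pipeline.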