Llama-2-7b-pruned50-retrained / gsm8k_5shot_bs16_bf16.json
{
  "results": {
    "gsm8k": {
      "acc": 0.09173616376042457,
      "acc_stderr": 0.007950942148339354
    }
  },
  "versions": {
    "gsm8k": 0
  },
  "config": {
    "model": "sparseml",
    "model_args": "pretrained=/network/alexandre/research/cerebras/llama2_7B_sparse50_45B_retrained/checkpoint,dtype=bfloat16",
    "num_fewshot": 5,
    "batch_size": "16",
    "batch_sizes": [],
    "device": "cuda:4",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
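A minimal sketch of how this results file might be consumed downstream, using only the Python standard library. The file name and local path are assumptions; the script loads the JSON above and reports the 5-shot GSM8K accuracy with an approximate 95% confidence interval derived from the reported acc_stderr.

import json

# Assumed local path to this results file; adjust as needed.
RESULTS_PATH = "gsm8k_5shot_bs16_bf16.json"

with open(RESULTS_PATH) as f:
    results = json.load(f)

metrics = results["results"]["gsm8k"]
acc = metrics["acc"]
stderr = metrics["acc_stderr"]

# Approximate 95% confidence interval: acc +/- 1.96 * standard error.
low, high = acc - 1.96 * stderr, acc + 1.96 * stderr

print(f"GSM8K {results['config']['num_fewshot']}-shot accuracy: {acc:.4f} "
      f"(95% CI ~ [{low:.4f}, {high:.4f}], "
      f"batch_size={results['config']['batch_size']})")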