{
  "results": {
    "anli_r1": {
      "acc": 0.31,
      "acc_stderr": 0.014632638658632902
    },
    "anli_r2": {
      "acc": 0.31,
      "acc_stderr": 0.014632638658632905
    },
    "anli_r3": {
      "acc": 0.3283333333333333,
      "acc_stderr": 0.013562032919529017
    },
    "cb": {
      "acc": 0.3392857142857143,
      "acc_stderr": 0.06384226561930825,
      "f1": 0.29749748849204566
    },
    "copa": {
      "acc": 0.79,
      "acc_stderr": 0.040936018074033256
    },
    "hellaswag": {
      "acc": 0.4803823939454292,
      "acc_stderr": 0.004985939292819582,
      "acc_norm": 0.6294562836088429,
      "acc_norm_stderr": 0.004819633668832538
    },
    "rte": {
      "acc": 0.44765342960288806,
      "acc_stderr": 0.02993107036293953
    },
    "winogrande": {
      "acc": 0.5887924230465666,
      "acc_stderr": 0.013829128358676874
    },
    "storycloze_2016": {
      "acc": 0.7049706039551042,
      "acc_stderr": 0.010546232606962289
    },
    "boolq": {
      "acc": 0.5522935779816514,
      "acc_stderr": 0.008697094687974059
    },
    "arc_easy": {
      "acc": 0.6262626262626263,
      "acc_stderr": 0.009927267058259621,
      "acc_norm": 0.5934343434343434,
      "acc_norm_stderr": 0.010079056419223527
    },
    "arc_challenge": {
      "acc": 0.2883959044368601,
      "acc_stderr": 0.013238394422428173,
      "acc_norm": 0.3148464163822526,
      "acc_norm_stderr": 0.01357265770308495
    },
    "sciq": {
      "acc": 0.892,
      "acc_stderr": 0.0098200016513457,
      "acc_norm": 0.869,
      "acc_norm_stderr": 0.010674874844837954
    },
    "piqa": {
      "acc": 0.7486398258977149,
      "acc_stderr": 0.010121156016819259,
      "acc_norm": 0.7633297062023939,
      "acc_norm_stderr": 0.009916841655042809
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}