{
"results": {
"hendrycksTest-abstract_algebra": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"hendrycksTest-anatomy": {
"acc": 0.4,
"acc_stderr": 0.04232073695151589,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04232073695151589
},
"hendrycksTest-astronomy": {
"acc": 0.3684210526315789,
"acc_stderr": 0.03925523381052932,
"acc_norm": 0.3684210526315789,
"acc_norm_stderr": 0.03925523381052932
},
"hendrycksTest-business_ethics": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.42641509433962266,
"acc_stderr": 0.030437794342983045,
"acc_norm": 0.42641509433962266,
"acc_norm_stderr": 0.030437794342983045
},
"hendrycksTest-college_biology": {
"acc": 0.3680555555555556,
"acc_stderr": 0.040329990539607195,
"acc_norm": 0.3680555555555556,
"acc_norm_stderr": 0.040329990539607195
},
"hendrycksTest-college_chemistry": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"hendrycksTest-college_computer_science": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"hendrycksTest-college_mathematics": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"hendrycksTest-college_medicine": {
"acc": 0.3063583815028902,
"acc_stderr": 0.03514942551267437,
"acc_norm": 0.3063583815028902,
"acc_norm_stderr": 0.03514942551267437
},
"hendrycksTest-college_physics": {
"acc": 0.23529411764705882,
"acc_stderr": 0.04220773659171453,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.04220773659171453
},
"hendrycksTest-computer_security": {
"acc": 0.55,
"acc_stderr": 0.05,
"acc_norm": 0.55,
"acc_norm_stderr": 0.05
},
"hendrycksTest-conceptual_physics": {
"acc": 0.3617021276595745,
"acc_stderr": 0.03141082197596239,
"acc_norm": 0.3617021276595745,
"acc_norm_stderr": 0.03141082197596239
},
"hendrycksTest-econometrics": {
"acc": 0.2982456140350877,
"acc_stderr": 0.043036840335373146,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.043036840335373146
},
"hendrycksTest-electrical_engineering": {
"acc": 0.4,
"acc_stderr": 0.04082482904638629,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04082482904638629
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.2698412698412698,
"acc_stderr": 0.02286083830923207,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.02286083830923207
},
"hendrycksTest-formal_logic": {
"acc": 0.2777777777777778,
"acc_stderr": 0.04006168083848876,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.04006168083848876
},
"hendrycksTest-global_facts": {
"acc": 0.29,
"acc_stderr": 0.04560480215720683,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"hendrycksTest-high_school_biology": {
"acc": 0.43870967741935485,
"acc_stderr": 0.028229497320317213,
"acc_norm": 0.43870967741935485,
"acc_norm_stderr": 0.028229497320317213
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.27586206896551724,
"acc_stderr": 0.03144712581678241,
"acc_norm": 0.27586206896551724,
"acc_norm_stderr": 0.03144712581678241
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"hendrycksTest-high_school_european_history": {
"acc": 0.5636363636363636,
"acc_stderr": 0.03872592983524754,
"acc_norm": 0.5636363636363636,
"acc_norm_stderr": 0.03872592983524754
},
"hendrycksTest-high_school_geography": {
"acc": 0.4494949494949495,
"acc_stderr": 0.0354413249194797,
"acc_norm": 0.4494949494949495,
"acc_norm_stderr": 0.0354413249194797
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.533678756476684,
"acc_stderr": 0.036002440698671784,
"acc_norm": 0.533678756476684,
"acc_norm_stderr": 0.036002440698671784
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.3230769230769231,
"acc_stderr": 0.023710888501970562,
"acc_norm": 0.3230769230769231,
"acc_norm_stderr": 0.023710888501970562
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.24814814814814815,
"acc_stderr": 0.0263357394040558,
"acc_norm": 0.24814814814814815,
"acc_norm_stderr": 0.0263357394040558
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.3739495798319328,
"acc_stderr": 0.03142946637883708,
"acc_norm": 0.3739495798319328,
"acc_norm_stderr": 0.03142946637883708
},
"hendrycksTest-high_school_physics": {
"acc": 0.304635761589404,
"acc_stderr": 0.037579499229433426,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.037579499229433426
},
"hendrycksTest-high_school_psychology": {
"acc": 0.4990825688073395,
"acc_stderr": 0.021437287056051215,
"acc_norm": 0.4990825688073395,
"acc_norm_stderr": 0.021437287056051215
},
"hendrycksTest-high_school_statistics": {
"acc": 0.27314814814814814,
"acc_stderr": 0.030388051301678116,
"acc_norm": 0.27314814814814814,
"acc_norm_stderr": 0.030388051301678116
},
"hendrycksTest-high_school_us_history": {
"acc": 0.5147058823529411,
"acc_stderr": 0.03507793834791323,
"acc_norm": 0.5147058823529411,
"acc_norm_stderr": 0.03507793834791323
},
"hendrycksTest-high_school_world_history": {
"acc": 0.5611814345991561,
"acc_stderr": 0.032302649315470375,
"acc_norm": 0.5611814345991561,
"acc_norm_stderr": 0.032302649315470375
},
"hendrycksTest-human_aging": {
"acc": 0.47085201793721976,
"acc_stderr": 0.03350073248773404,
"acc_norm": 0.47085201793721976,
"acc_norm_stderr": 0.03350073248773404
},
"hendrycksTest-human_sexuality": {
"acc": 0.45038167938931295,
"acc_stderr": 0.04363643698524779,
"acc_norm": 0.45038167938931295,
"acc_norm_stderr": 0.04363643698524779
},
"hendrycksTest-international_law": {
"acc": 0.628099173553719,
"acc_stderr": 0.04412015806624504,
"acc_norm": 0.628099173553719,
"acc_norm_stderr": 0.04412015806624504
},
"hendrycksTest-jurisprudence": {
"acc": 0.4074074074074074,
"acc_stderr": 0.04750077341199986,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.04750077341199986
},
"hendrycksTest-logical_fallacies": {
"acc": 0.48466257668711654,
"acc_stderr": 0.03926522378708843,
"acc_norm": 0.48466257668711654,
"acc_norm_stderr": 0.03926522378708843
},
"hendrycksTest-machine_learning": {
"acc": 0.4107142857142857,
"acc_stderr": 0.04669510663875191,
"acc_norm": 0.4107142857142857,
"acc_norm_stderr": 0.04669510663875191
},
"hendrycksTest-management": {
"acc": 0.4563106796116505,
"acc_stderr": 0.049318019942204146,
"acc_norm": 0.4563106796116505,
"acc_norm_stderr": 0.049318019942204146
},
"hendrycksTest-marketing": {
"acc": 0.6367521367521367,
"acc_stderr": 0.03150712523091264,
"acc_norm": 0.6367521367521367,
"acc_norm_stderr": 0.03150712523091264
},
"hendrycksTest-medical_genetics": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"hendrycksTest-miscellaneous": {
"acc": 0.5606641123882503,
"acc_stderr": 0.017747874245683606,
"acc_norm": 0.5606641123882503,
"acc_norm_stderr": 0.017747874245683606
},
"hendrycksTest-moral_disputes": {
"acc": 0.47109826589595377,
"acc_stderr": 0.026874085883518348,
"acc_norm": 0.47109826589595377,
"acc_norm_stderr": 0.026874085883518348
},
"hendrycksTest-moral_scenarios": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574915,
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574915
},
"hendrycksTest-nutrition": {
"acc": 0.434640522875817,
"acc_stderr": 0.028384256704883037,
"acc_norm": 0.434640522875817,
"acc_norm_stderr": 0.028384256704883037
},
"hendrycksTest-philosophy": {
"acc": 0.4758842443729904,
"acc_stderr": 0.028365041542564577,
"acc_norm": 0.4758842443729904,
"acc_norm_stderr": 0.028365041542564577
},
"hendrycksTest-prehistory": {
"acc": 0.4537037037037037,
"acc_stderr": 0.027701228468542602,
"acc_norm": 0.4537037037037037,
"acc_norm_stderr": 0.027701228468542602
},
"hendrycksTest-professional_accounting": {
"acc": 0.32978723404255317,
"acc_stderr": 0.028045946942042398,
"acc_norm": 0.32978723404255317,
"acc_norm_stderr": 0.028045946942042398
},
"hendrycksTest-professional_law": {
"acc": 0.3494132985658409,
"acc_stderr": 0.012177306252786686,
"acc_norm": 0.3494132985658409,
"acc_norm_stderr": 0.012177306252786686
},
"hendrycksTest-professional_medicine": {
"acc": 0.39705882352941174,
"acc_stderr": 0.029722152099280058,
"acc_norm": 0.39705882352941174,
"acc_norm_stderr": 0.029722152099280058
},
"hendrycksTest-professional_psychology": {
"acc": 0.41013071895424835,
"acc_stderr": 0.019898412717635913,
"acc_norm": 0.41013071895424835,
"acc_norm_stderr": 0.019898412717635913
},
"hendrycksTest-public_relations": {
"acc": 0.4727272727272727,
"acc_stderr": 0.04782001791380063,
"acc_norm": 0.4727272727272727,
"acc_norm_stderr": 0.04782001791380063
},
"hendrycksTest-security_studies": {
"acc": 0.4163265306122449,
"acc_stderr": 0.03155782816556164,
"acc_norm": 0.4163265306122449,
"acc_norm_stderr": 0.03155782816556164
},
"hendrycksTest-sociology": {
"acc": 0.5422885572139303,
"acc_stderr": 0.03522865864099597,
"acc_norm": 0.5422885572139303,
"acc_norm_stderr": 0.03522865864099597
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"hendrycksTest-virology": {
"acc": 0.4397590361445783,
"acc_stderr": 0.03864139923699121,
"acc_norm": 0.4397590361445783,
"acc_norm_stderr": 0.03864139923699121
},
"hendrycksTest-world_religions": {
"acc": 0.5789473684210527,
"acc_stderr": 0.03786720706234214,
"acc_norm": 0.5789473684210527,
"acc_norm_stderr": 0.03786720706234214
}
},
"versions": {
"hendrycksTest-abstract_algebra": 1,
"hendrycksTest-anatomy": 1,
"hendrycksTest-astronomy": 1,
"hendrycksTest-business_ethics": 1,
"hendrycksTest-clinical_knowledge": 1,
"hendrycksTest-college_biology": 1,
"hendrycksTest-college_chemistry": 1,
"hendrycksTest-college_computer_science": 1,
"hendrycksTest-college_mathematics": 1,
"hendrycksTest-college_medicine": 1,
"hendrycksTest-college_physics": 1,
"hendrycksTest-computer_security": 1,
"hendrycksTest-conceptual_physics": 1,
"hendrycksTest-econometrics": 1,
"hendrycksTest-electrical_engineering": 1,
"hendrycksTest-elementary_mathematics": 1,
"hendrycksTest-formal_logic": 1,
"hendrycksTest-global_facts": 1,
"hendrycksTest-high_school_biology": 1,
"hendrycksTest-high_school_chemistry": 1,
"hendrycksTest-high_school_computer_science": 1,
"hendrycksTest-high_school_european_history": 1,
"hendrycksTest-high_school_geography": 1,
"hendrycksTest-high_school_government_and_politics": 1,
"hendrycksTest-high_school_macroeconomics": 1,
"hendrycksTest-high_school_mathematics": 1,
"hendrycksTest-high_school_microeconomics": 1,
"hendrycksTest-high_school_physics": 1,
"hendrycksTest-high_school_psychology": 1,
"hendrycksTest-high_school_statistics": 1,
"hendrycksTest-high_school_us_history": 1,
"hendrycksTest-high_school_world_history": 1,
"hendrycksTest-human_aging": 1,
"hendrycksTest-human_sexuality": 1,
"hendrycksTest-international_law": 1,
"hendrycksTest-jurisprudence": 1,
"hendrycksTest-logical_fallacies": 1,
"hendrycksTest-machine_learning": 1,
"hendrycksTest-management": 1,
"hendrycksTest-marketing": 1,
"hendrycksTest-medical_genetics": 1,
"hendrycksTest-miscellaneous": 1,
"hendrycksTest-moral_disputes": 1,
"hendrycksTest-moral_scenarios": 1,
"hendrycksTest-nutrition": 1,
"hendrycksTest-philosophy": 1,
"hendrycksTest-prehistory": 1,
"hendrycksTest-professional_accounting": 1,
"hendrycksTest-professional_law": 1,
"hendrycksTest-professional_medicine": 1,
"hendrycksTest-professional_psychology": 1,
"hendrycksTest-public_relations": 1,
"hendrycksTest-security_studies": 1,
"hendrycksTest-sociology": 1,
"hendrycksTest-us_foreign_policy": 1,
"hendrycksTest-virology": 1,
"hendrycksTest-world_religions": 1
},
"config": {
"model": "sparseml",
"model_args": "pretrained=/network/alexandre/research/cerebras/llama2_7B_sparse50_45B_retrained/ultrachat200k/llama2_7B_45B_sparse50_LR2e-4_GC2_E2/training,dtype=bfloat16",
"num_fewshot": 5,
"batch_size": "4",
"batch_sizes": [],
"device": "cuda:6",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}