{
  "results": {
    "hendrycksTest-abstract_algebra": {
      "acc": 0.35,
      "acc_stderr": 0.0479372485441102,
      "acc_norm": 0.35,
      "acc_norm_stderr": 0.0479372485441102
    },
    "hendrycksTest-anatomy": {
      "acc": 0.4,
      "acc_stderr": 0.04232073695151589,
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.04232073695151589
    },
    "hendrycksTest-astronomy": {
      "acc": 0.3684210526315789,
      "acc_stderr": 0.03925523381052932,
      "acc_norm": 0.3684210526315789,
      "acc_norm_stderr": 0.03925523381052932
    },
    "hendrycksTest-business_ethics": {
      "acc": 0.43,
      "acc_stderr": 0.049756985195624284,
      "acc_norm": 0.43,
      "acc_norm_stderr": 0.049756985195624284
    },
    "hendrycksTest-clinical_knowledge": {
      "acc": 0.42641509433962266,
      "acc_stderr": 0.030437794342983045,
      "acc_norm": 0.42641509433962266,
      "acc_norm_stderr": 0.030437794342983045
    },
    "hendrycksTest-college_biology": {
      "acc": 0.3680555555555556,
      "acc_stderr": 0.040329990539607195,
      "acc_norm": 0.3680555555555556,
      "acc_norm_stderr": 0.040329990539607195
    },
    "hendrycksTest-college_chemistry": {
      "acc": 0.3,
      "acc_stderr": 0.046056618647183814,
      "acc_norm": 0.3,
      "acc_norm_stderr": 0.046056618647183814
    },
    "hendrycksTest-college_computer_science": {
      "acc": 0.38,
      "acc_stderr": 0.048783173121456316,
      "acc_norm": 0.38,
      "acc_norm_stderr": 0.048783173121456316
    },
    "hendrycksTest-college_mathematics": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "hendrycksTest-college_medicine": {
      "acc": 0.3063583815028902,
      "acc_stderr": 0.03514942551267437,
      "acc_norm": 0.3063583815028902,
      "acc_norm_stderr": 0.03514942551267437
    },
    "hendrycksTest-college_physics": {
      "acc": 0.23529411764705882,
      "acc_stderr": 0.04220773659171453,
      "acc_norm": 0.23529411764705882,
      "acc_norm_stderr": 0.04220773659171453
    },
    "hendrycksTest-computer_security": {
      "acc": 0.55,
      "acc_stderr": 0.05,
      "acc_norm": 0.55,
      "acc_norm_stderr": 0.05
    },
    "hendrycksTest-conceptual_physics": {
      "acc": 0.3617021276595745,
      "acc_stderr": 0.03141082197596239,
      "acc_norm": 0.3617021276595745,
      "acc_norm_stderr": 0.03141082197596239
    },
    "hendrycksTest-econometrics": {
      "acc": 0.2982456140350877,
      "acc_stderr": 0.043036840335373146,
      "acc_norm": 0.2982456140350877,
      "acc_norm_stderr": 0.043036840335373146
    },
    "hendrycksTest-electrical_engineering": {
      "acc": 0.4,
      "acc_stderr": 0.04082482904638629,
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.04082482904638629
    },
    "hendrycksTest-elementary_mathematics": {
      "acc": 0.2698412698412698,
      "acc_stderr": 0.02286083830923207,
      "acc_norm": 0.2698412698412698,
      "acc_norm_stderr": 0.02286083830923207
    },
    "hendrycksTest-formal_logic": {
      "acc": 0.2777777777777778,
      "acc_stderr": 0.04006168083848876,
      "acc_norm": 0.2777777777777778,
      "acc_norm_stderr": 0.04006168083848876
    },
    "hendrycksTest-global_facts": {
      "acc": 0.29,
      "acc_stderr": 0.04560480215720683,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.04560480215720683
    },
    "hendrycksTest-high_school_biology": {
      "acc": 0.43870967741935485,
      "acc_stderr": 0.028229497320317213,
      "acc_norm": 0.43870967741935485,
      "acc_norm_stderr": 0.028229497320317213
    },
    "hendrycksTest-high_school_chemistry": {
      "acc": 0.27586206896551724,
      "acc_stderr": 0.03144712581678241,
      "acc_norm": 0.27586206896551724,
      "acc_norm_stderr": 0.03144712581678241
    },
    "hendrycksTest-high_school_computer_science": {
      "acc": 0.42,
      "acc_stderr": 0.049604496374885836,
      "acc_norm": 0.42,
      "acc_norm_stderr": 0.049604496374885836
    },
    "hendrycksTest-high_school_european_history": {
      "acc": 0.5636363636363636,
      "acc_stderr": 0.03872592983524754,
      "acc_norm": 0.5636363636363636,
      "acc_norm_stderr": 0.03872592983524754
    },
    "hendrycksTest-high_school_geography": {
      "acc": 0.4494949494949495,
      "acc_stderr": 0.0354413249194797,
      "acc_norm": 0.4494949494949495,
      "acc_norm_stderr": 0.0354413249194797
    },
    "hendrycksTest-high_school_government_and_politics": {
      "acc": 0.533678756476684,
      "acc_stderr": 0.036002440698671784,
      "acc_norm": 0.533678756476684,
      "acc_norm_stderr": 0.036002440698671784
    },
    "hendrycksTest-high_school_macroeconomics": {
      "acc": 0.3230769230769231,
      "acc_stderr": 0.023710888501970562,
      "acc_norm": 0.3230769230769231,
      "acc_norm_stderr": 0.023710888501970562
    },
    "hendrycksTest-high_school_mathematics": {
      "acc": 0.24814814814814815,
      "acc_stderr": 0.0263357394040558,
      "acc_norm": 0.24814814814814815,
      "acc_norm_stderr": 0.0263357394040558
    },
    "hendrycksTest-high_school_microeconomics": {
      "acc": 0.3739495798319328,
      "acc_stderr": 0.03142946637883708,
      "acc_norm": 0.3739495798319328,
      "acc_norm_stderr": 0.03142946637883708
    },
    "hendrycksTest-high_school_physics": {
      "acc": 0.304635761589404,
      "acc_stderr": 0.037579499229433426,
      "acc_norm": 0.304635761589404,
      "acc_norm_stderr": 0.037579499229433426
    },
    "hendrycksTest-high_school_psychology": {
      "acc": 0.4990825688073395,
      "acc_stderr": 0.021437287056051215,
      "acc_norm": 0.4990825688073395,
      "acc_norm_stderr": 0.021437287056051215
    },
    "hendrycksTest-high_school_statistics": {
      "acc": 0.27314814814814814,
      "acc_stderr": 0.030388051301678116,
      "acc_norm": 0.27314814814814814,
      "acc_norm_stderr": 0.030388051301678116
    },
    "hendrycksTest-high_school_us_history": {
      "acc": 0.5147058823529411,
      "acc_stderr": 0.03507793834791323,
      "acc_norm": 0.5147058823529411,
      "acc_norm_stderr": 0.03507793834791323
    },
    "hendrycksTest-high_school_world_history": {
      "acc": 0.5611814345991561,
      "acc_stderr": 0.032302649315470375,
      "acc_norm": 0.5611814345991561,
      "acc_norm_stderr": 0.032302649315470375
    },
    "hendrycksTest-human_aging": {
      "acc": 0.47085201793721976,
      "acc_stderr": 0.03350073248773404,
      "acc_norm": 0.47085201793721976,
      "acc_norm_stderr": 0.03350073248773404
    },
    "hendrycksTest-human_sexuality": {
      "acc": 0.45038167938931295,
      "acc_stderr": 0.04363643698524779,
      "acc_norm": 0.45038167938931295,
      "acc_norm_stderr": 0.04363643698524779
    },
    "hendrycksTest-international_law": {
      "acc": 0.628099173553719,
      "acc_stderr": 0.04412015806624504,
      "acc_norm": 0.628099173553719,
      "acc_norm_stderr": 0.04412015806624504
    },
    "hendrycksTest-jurisprudence": {
      "acc": 0.4074074074074074,
      "acc_stderr": 0.04750077341199986,
      "acc_norm": 0.4074074074074074,
      "acc_norm_stderr": 0.04750077341199986
    },
    "hendrycksTest-logical_fallacies": {
      "acc": 0.48466257668711654,
      "acc_stderr": 0.03926522378708843,
      "acc_norm": 0.48466257668711654,
      "acc_norm_stderr": 0.03926522378708843
    },
    "hendrycksTest-machine_learning": {
      "acc": 0.4107142857142857,
      "acc_stderr": 0.04669510663875191,
      "acc_norm": 0.4107142857142857,
      "acc_norm_stderr": 0.04669510663875191
    },
    "hendrycksTest-management": {
      "acc": 0.4563106796116505,
      "acc_stderr": 0.049318019942204146,
      "acc_norm": 0.4563106796116505,
      "acc_norm_stderr": 0.049318019942204146
    },
    "hendrycksTest-marketing": {
      "acc": 0.6367521367521367,
      "acc_stderr": 0.03150712523091264,
      "acc_norm": 0.6367521367521367,
      "acc_norm_stderr": 0.03150712523091264
    },
    "hendrycksTest-medical_genetics": {
      "acc": 0.43,
      "acc_stderr": 0.049756985195624284,
      "acc_norm": 0.43,
      "acc_norm_stderr": 0.049756985195624284
    },
    "hendrycksTest-miscellaneous": {
      "acc": 0.5606641123882503,
      "acc_stderr": 0.017747874245683606,
      "acc_norm": 0.5606641123882503,
      "acc_norm_stderr": 0.017747874245683606
    },
    "hendrycksTest-moral_disputes": {
      "acc": 0.47109826589595377,
      "acc_stderr": 0.026874085883518348,
      "acc_norm": 0.47109826589595377,
      "acc_norm_stderr": 0.026874085883518348
    },
    "hendrycksTest-moral_scenarios": {
      "acc": 0.23798882681564246,
      "acc_stderr": 0.014242630070574915,
      "acc_norm": 0.23798882681564246,
      "acc_norm_stderr": 0.014242630070574915
    },
    "hendrycksTest-nutrition": {
      "acc": 0.434640522875817,
      "acc_stderr": 0.028384256704883037,
      "acc_norm": 0.434640522875817,
      "acc_norm_stderr": 0.028384256704883037
    },
    "hendrycksTest-philosophy": {
      "acc": 0.4758842443729904,
      "acc_stderr": 0.028365041542564577,
      "acc_norm": 0.4758842443729904,
      "acc_norm_stderr": 0.028365041542564577
    },
    "hendrycksTest-prehistory": {
      "acc": 0.4537037037037037,
      "acc_stderr": 0.027701228468542602,
      "acc_norm": 0.4537037037037037,
      "acc_norm_stderr": 0.027701228468542602
    },
    "hendrycksTest-professional_accounting": {
      "acc": 0.32978723404255317,
      "acc_stderr": 0.028045946942042398,
      "acc_norm": 0.32978723404255317,
      "acc_norm_stderr": 0.028045946942042398
    },
    "hendrycksTest-professional_law": {
      "acc": 0.3494132985658409,
      "acc_stderr": 0.012177306252786686,
      "acc_norm": 0.3494132985658409,
      "acc_norm_stderr": 0.012177306252786686
    },
    "hendrycksTest-professional_medicine": {
      "acc": 0.39705882352941174,
      "acc_stderr": 0.029722152099280058,
      "acc_norm": 0.39705882352941174,
      "acc_norm_stderr": 0.029722152099280058
    },
    "hendrycksTest-professional_psychology": {
      "acc": 0.41013071895424835,
      "acc_stderr": 0.019898412717635913,
      "acc_norm": 0.41013071895424835,
      "acc_norm_stderr": 0.019898412717635913
    },
    "hendrycksTest-public_relations": {
      "acc": 0.4727272727272727,
      "acc_stderr": 0.04782001791380063,
      "acc_norm": 0.4727272727272727,
      "acc_norm_stderr": 0.04782001791380063
    },
    "hendrycksTest-security_studies": {
      "acc": 0.4163265306122449,
      "acc_stderr": 0.03155782816556164,
      "acc_norm": 0.4163265306122449,
      "acc_norm_stderr": 0.03155782816556164
    },
    "hendrycksTest-sociology": {
      "acc": 0.5422885572139303,
      "acc_stderr": 0.03522865864099597,
      "acc_norm": 0.5422885572139303,
      "acc_norm_stderr": 0.03522865864099597
    },
    "hendrycksTest-us_foreign_policy": {
      "acc": 0.56,
      "acc_stderr": 0.04988876515698589,
      "acc_norm": 0.56,
      "acc_norm_stderr": 0.04988876515698589
    },
    "hendrycksTest-virology": {
      "acc": 0.4397590361445783,
      "acc_stderr": 0.03864139923699121,
      "acc_norm": 0.4397590361445783,
      "acc_norm_stderr": 0.03864139923699121
    },
    "hendrycksTest-world_religions": {
      "acc": 0.5789473684210527,
      "acc_stderr": 0.03786720706234214,
      "acc_norm": 0.5789473684210527,
      "acc_norm_stderr": 0.03786720706234214
    }
  },
  "versions": {
    "hendrycksTest-abstract_algebra": 1,
    "hendrycksTest-anatomy": 1,
    "hendrycksTest-astronomy": 1,
    "hendrycksTest-business_ethics": 1,
    "hendrycksTest-clinical_knowledge": 1,
    "hendrycksTest-college_biology": 1,
    "hendrycksTest-college_chemistry": 1,
    "hendrycksTest-college_computer_science": 1,
    "hendrycksTest-college_mathematics": 1,
    "hendrycksTest-college_medicine": 1,
    "hendrycksTest-college_physics": 1,
    "hendrycksTest-computer_security": 1,
    "hendrycksTest-conceptual_physics": 1,
    "hendrycksTest-econometrics": 1,
    "hendrycksTest-electrical_engineering": 1,
    "hendrycksTest-elementary_mathematics": 1,
    "hendrycksTest-formal_logic": 1,
    "hendrycksTest-global_facts": 1,
    "hendrycksTest-high_school_biology": 1,
    "hendrycksTest-high_school_chemistry": 1,
    "hendrycksTest-high_school_computer_science": 1,
    "hendrycksTest-high_school_european_history": 1,
    "hendrycksTest-high_school_geography": 1,
    "hendrycksTest-high_school_government_and_politics": 1,
    "hendrycksTest-high_school_macroeconomics": 1,
    "hendrycksTest-high_school_mathematics": 1,
    "hendrycksTest-high_school_microeconomics": 1,
    "hendrycksTest-high_school_physics": 1,
    "hendrycksTest-high_school_psychology": 1,
    "hendrycksTest-high_school_statistics": 1,
    "hendrycksTest-high_school_us_history": 1,
    "hendrycksTest-high_school_world_history": 1,
    "hendrycksTest-human_aging": 1,
    "hendrycksTest-human_sexuality": 1,
    "hendrycksTest-international_law": 1,
    "hendrycksTest-jurisprudence": 1,
    "hendrycksTest-logical_fallacies": 1,
    "hendrycksTest-machine_learning": 1,
    "hendrycksTest-management": 1,
    "hendrycksTest-marketing": 1,
    "hendrycksTest-medical_genetics": 1,
    "hendrycksTest-miscellaneous": 1,
    "hendrycksTest-moral_disputes": 1,
    "hendrycksTest-moral_scenarios": 1,
    "hendrycksTest-nutrition": 1,
    "hendrycksTest-philosophy": 1,
    "hendrycksTest-prehistory": 1,
    "hendrycksTest-professional_accounting": 1,
    "hendrycksTest-professional_law": 1,
    "hendrycksTest-professional_medicine": 1,
    "hendrycksTest-professional_psychology": 1,
    "hendrycksTest-public_relations": 1,
    "hendrycksTest-security_studies": 1,
    "hendrycksTest-sociology": 1,
    "hendrycksTest-us_foreign_policy": 1,
    "hendrycksTest-virology": 1,
    "hendrycksTest-world_religions": 1
  },
  "config": {
    "model": "sparseml",
    "model_args": "pretrained=/network/alexandre/research/cerebras/llama2_7B_sparse50_45B_retrained/ultrachat200k/llama2_7B_45B_sparse50_LR2e-4_GC2_E2/training,dtype=bfloat16",
    "num_fewshot": 5,
    "batch_size": "4",
    "batch_sizes": [],
    "device": "cuda:6",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}