{
  "best_metric": 0.48671162128448486,
  "best_model_checkpoint": "./output_v2/7b_cluster09_Nous-Hermes-llama-2-7b_partitioned_v3_standardized_09/checkpoint-1600",
  "epoch": 2.7127003699136867,
  "global_step": 2200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 0.0002,
      "loss": 0.597,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0002,
      "loss": 0.5778,
      "step": 20
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0002,
      "loss": 0.5677,
      "step": 30
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0002,
      "loss": 0.5528,
      "step": 40
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0002,
      "loss": 0.5558,
      "step": 50
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0002,
      "loss": 0.5571,
      "step": 60
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0002,
      "loss": 0.5499,
      "step": 70
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.0002,
      "loss": 0.5491,
      "step": 80
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0002,
      "loss": 0.5407,
      "step": 90
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0002,
      "loss": 0.5492,
      "step": 100
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0002,
      "loss": 0.5258,
      "step": 110
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0002,
      "loss": 0.5217,
      "step": 120
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0002,
      "loss": 0.538,
      "step": 130
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0002,
      "loss": 0.5265,
      "step": 140
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0002,
      "loss": 0.5344,
      "step": 150
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0002,
      "loss": 0.5361,
      "step": 160
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0002,
      "loss": 0.5186,
      "step": 170
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0002,
      "loss": 0.5312,
      "step": 180
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0002,
      "loss": 0.5395,
      "step": 190
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0002,
      "loss": 0.5399,
      "step": 200
    },
    {
      "epoch": 0.25,
      "eval_loss": 0.533891499042511,
      "eval_runtime": 249.6236,
      "eval_samples_per_second": 4.006,
      "eval_steps_per_second": 2.003,
      "step": 200
    },
    {
      "epoch": 0.25,
      "mmlu_eval_accuracy": 0.46207163729626294,
      "mmlu_eval_accuracy_abstract_algebra": 0.09090909090909091,
      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.34375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.45454545454545453,
      "mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
      "mmlu_eval_accuracy_high_school_geography": 0.6363636363636364,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
      "mmlu_eval_accuracy_high_school_psychology": 0.7333333333333333,
      "mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
      "mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273,
      "mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.7272727272727273,
      "mmlu_eval_accuracy_marketing": 0.72,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6744186046511628,
      "mmlu_eval_accuracy_moral_disputes": 0.5,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
      "mmlu_eval_accuracy_philosophy": 0.47058823529411764,
      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
      "mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
      "mmlu_eval_accuracy_professional_law": 0.34705882352941175,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.37681159420289856,
      "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.267449478023046,
      "step": 200
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0002,
      "loss": 0.524,
      "step": 210
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0002,
      "loss": 0.5484,
      "step": 220
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0002,
      "loss": 0.5247,
      "step": 230
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0002,
      "loss": 0.5305,
      "step": 240
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0002,
      "loss": 0.5179,
      "step": 250
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0002,
      "loss": 0.5408,
      "step": 260
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0002,
      "loss": 0.5472,
      "step": 270
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0002,
      "loss": 0.5136,
      "step": 280
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0002,
      "loss": 0.5262,
      "step": 290
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.0002,
      "loss": 0.5361,
      "step": 300
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.0002,
      "loss": 0.5007,
      "step": 310
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0002,
      "loss": 0.5211,
      "step": 320
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.0002,
      "loss": 0.5217,
      "step": 330
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0002,
      "loss": 0.5337,
      "step": 340
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.0002,
      "loss": 0.5113,
      "step": 350
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0002,
      "loss": 0.518,
      "step": 360
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0002,
      "loss": 0.5151,
      "step": 370
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0002,
      "loss": 0.5133,
      "step": 380
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0002,
      "loss": 0.5083,
      "step": 390
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0002,
      "loss": 0.5235,
      "step": 400
    },
    {
      "epoch": 0.49,
      "eval_loss": 0.5213926434516907,
      "eval_runtime": 249.5749,
      "eval_samples_per_second": 4.007,
      "eval_steps_per_second": 2.003,
      "step": 400
    },
    {
      "epoch": 0.49,
      "mmlu_eval_accuracy": 0.45812855653406065,
      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
      "mmlu_eval_accuracy_astronomy": 0.4375,
      "mmlu_eval_accuracy_business_ethics": 0.6363636363636364,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
      "mmlu_eval_accuracy_college_medicine": 0.3181818181818182,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.2682926829268293,
      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
      "mmlu_eval_accuracy_global_facts": 0.6,
      "mmlu_eval_accuracy_high_school_biology": 0.375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.45454545454545453,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_geography": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
      "mmlu_eval_accuracy_high_school_psychology": 0.75,
      "mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173,
      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
      "mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384,
      "mmlu_eval_accuracy_human_aging": 0.7391304347826086,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
      "mmlu_eval_accuracy_management": 0.45454545454545453,
      "mmlu_eval_accuracy_marketing": 0.8,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6627906976744186,
      "mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
      "mmlu_eval_accuracy_philosophy": 0.4411764705882353,
      "mmlu_eval_accuracy_prehistory": 0.5428571428571428,
      "mmlu_eval_accuracy_professional_accounting": 0.25806451612903225,
      "mmlu_eval_accuracy_professional_law": 0.3352941176470588,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.36231884057971014,
      "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
      "mmlu_eval_accuracy_virology": 0.3333333333333333,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.1127305320092031,
      "step": 400
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0002,
      "loss": 0.5194,
      "step": 410
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0002,
      "loss": 0.5279,
      "step": 420
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0002,
      "loss": 0.5105,
      "step": 430
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0002,
      "loss": 0.5427,
      "step": 440
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0002,
      "loss": 0.5276,
      "step": 450
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0002,
      "loss": 0.4865,
      "step": 460
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.0002,
      "loss": 0.5161,
      "step": 470
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0002,
      "loss": 0.513,
      "step": 480
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0002,
      "loss": 0.5284,
      "step": 490
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0002,
      "loss": 0.5101,
      "step": 500
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0002,
      "loss": 0.5218,
      "step": 510
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0002,
      "loss": 0.5087,
      "step": 520
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0002,
      "loss": 0.5157,
      "step": 530
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0002,
      "loss": 0.501,
      "step": 540
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.0002,
      "loss": 0.508,
      "step": 550
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.0002,
      "loss": 0.5199,
      "step": 560
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0002,
      "loss": 0.5043,
      "step": 570
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.0002,
      "loss": 0.5069,
      "step": 580
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.0002,
      "loss": 0.5258,
      "step": 590
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.0002,
      "loss": 0.5189,
      "step": 600
    },
    {
      "epoch": 0.74,
      "eval_loss": 0.5119001865386963,
      "eval_runtime": 249.8867,
      "eval_samples_per_second": 4.002,
      "eval_steps_per_second": 2.001,
      "step": 600
    },
    {
      "epoch": 0.74,
      "mmlu_eval_accuracy": 0.45806114323766334,
      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
      "mmlu_eval_accuracy_astronomy": 0.4375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.3125,
      "mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
      "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
      "mmlu_eval_accuracy_global_facts": 0.6,
      "mmlu_eval_accuracy_high_school_biology": 0.375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_geography": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256,
      "mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
      "mmlu_eval_accuracy_high_school_psychology": 0.7666666666666667,
      "mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
      "mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273,
      "mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.5454545454545454,
      "mmlu_eval_accuracy_marketing": 0.8,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6627906976744186,
      "mmlu_eval_accuracy_moral_disputes": 0.5,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.5757575757575758,
      "mmlu_eval_accuracy_philosophy": 0.4117647058823529,
      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
      "mmlu_eval_accuracy_professional_accounting": 0.25806451612903225,
      "mmlu_eval_accuracy_professional_law": 0.3352941176470588,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.36231884057971014,
      "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.3333333333333333,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.1718710024425318,
      "step": 600
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.0002,
      "loss": 0.5234,
      "step": 610
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0002,
      "loss": 0.5205,
      "step": 620
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.0002,
      "loss": 0.5146,
      "step": 630
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.0002,
      "loss": 0.5094,
      "step": 640
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.0002,
      "loss": 0.4959,
      "step": 650
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.0002,
      "loss": 0.5001,
      "step": 660
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.0002,
      "loss": 0.5007,
      "step": 670
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.0002,
      "loss": 0.5029,
      "step": 680
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.0002,
      "loss": 0.5143,
      "step": 690
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0002,
      "loss": 0.4983,
      "step": 700
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.0002,
      "loss": 0.4995,
      "step": 710
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.0002,
      "loss": 0.5072,
      "step": 720
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.0002,
      "loss": 0.499,
      "step": 730
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.0002,
      "loss": 0.505,
      "step": 740
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0002,
      "loss": 0.4917,
      "step": 750
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.0002,
      "loss": 0.4983,
      "step": 760
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.0002,
      "loss": 0.4946,
      "step": 770
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.0002,
      "loss": 0.4931,
      "step": 780
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0002,
      "loss": 0.4836,
      "step": 790
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0002,
      "loss": 0.5001,
      "step": 800
    },
    {
      "epoch": 0.99,
      "eval_loss": 0.5022817254066467,
      "eval_runtime": 249.7465,
      "eval_samples_per_second": 4.004,
      "eval_steps_per_second": 2.002,
      "step": 800
    },
    {
      "epoch": 0.99,
      "mmlu_eval_accuracy": 0.45223086902107806,
      "mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
      "mmlu_eval_accuracy_astronomy": 0.4375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.2926829268292683,
      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.34375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_geography": 0.6363636363636364,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256,
      "mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
      "mmlu_eval_accuracy_high_school_psychology": 0.75,
      "mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
      "mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769,
      "mmlu_eval_accuracy_human_aging": 0.7391304347826086,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.5454545454545454,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
      "mmlu_eval_accuracy_management": 0.45454545454545453,
      "mmlu_eval_accuracy_marketing": 0.8,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
      "mmlu_eval_accuracy_moral_disputes": 0.5,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
      "mmlu_eval_accuracy_philosophy": 0.4117647058823529,
      "mmlu_eval_accuracy_prehistory": 0.4857142857142857,
      "mmlu_eval_accuracy_professional_accounting": 0.25806451612903225,
      "mmlu_eval_accuracy_professional_law": 0.35294117647058826,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.36231884057971014,
      "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.4444444444444444,
      "mmlu_eval_accuracy_world_religions": 0.6842105263157895,
      "mmlu_loss": 1.064671132450938,
      "step": 800
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0002,
      "loss": 0.5135,
      "step": 810
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.0002,
      "loss": 0.4532,
      "step": 820
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0002,
      "loss": 0.4483,
      "step": 830
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0002,
      "loss": 0.4507,
      "step": 840
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.0002,
      "loss": 0.4572,
      "step": 850
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.0002,
      "loss": 0.4346,
      "step": 860
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.0002,
      "loss": 0.4306,
      "step": 870
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.0002,
      "loss": 0.439,
      "step": 880
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0002,
      "loss": 0.4215,
      "step": 890
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.0002,
      "loss": 0.4608,
      "step": 900
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.0002,
      "loss": 0.4345,
      "step": 910
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.0002,
      "loss": 0.422,
      "step": 920
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.0002,
      "loss": 0.4444,
      "step": 930
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.0002,
      "loss": 0.4649,
      "step": 940
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.0002,
      "loss": 0.4508,
      "step": 950
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.0002,
      "loss": 0.439,
      "step": 960
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.0002,
      "loss": 0.4347,
      "step": 970
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.0002,
      "loss": 0.4413,
      "step": 980
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.0002,
      "loss": 0.4337,
      "step": 990
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.0002,
      "loss": 0.4358,
      "step": 1000
    },
    {
      "epoch": 1.23,
      "eval_loss": 0.5019292235374451,
      "eval_runtime": 249.7097,
      "eval_samples_per_second": 4.005,
      "eval_steps_per_second": 2.002,
      "step": 1000
    },
    {
      "epoch": 1.23,
      "mmlu_eval_accuracy": 0.46197732544268794,
      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.5,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.34375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
      "mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444,
      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
      "mmlu_eval_accuracy_high_school_geography": 0.6363636363636364,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
      "mmlu_eval_accuracy_high_school_psychology": 0.7333333333333333,
      "mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
      "mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273,
      "mmlu_eval_accuracy_high_school_world_history": 0.6153846153846154,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.7692307692307693,
      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
      "mmlu_eval_accuracy_management": 0.7272727272727273,
      "mmlu_eval_accuracy_marketing": 0.8,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
      "mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.5757575757575758,
      "mmlu_eval_accuracy_philosophy": 0.4117647058823529,
      "mmlu_eval_accuracy_prehistory": 0.5428571428571428,
      "mmlu_eval_accuracy_professional_accounting": 0.25806451612903225,
      "mmlu_eval_accuracy_professional_law": 0.32941176470588235,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.3188405797101449,
      "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
      "mmlu_eval_accuracy_virology": 0.5,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.103624303117589,
      "step": 1000
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0002,
      "loss": 0.4428,
      "step": 1010
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.0002,
      "loss": 0.4306,
      "step": 1020
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.0002,
      "loss": 0.4585,
      "step": 1030
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.0002,
      "loss": 0.4323,
      "step": 1040
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.0002,
      "loss": 0.4333,
      "step": 1050
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.0002,
      "loss": 0.4364,
      "step": 1060
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.0002,
      "loss": 0.4256,
      "step": 1070
    },
    {
      "epoch": 1.33,
      "learning_rate": 0.0002,
      "loss": 0.4197,
      "step": 1080
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.0002,
      "loss": 0.4382,
      "step": 1090
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.0002,
      "loss": 0.4489,
      "step": 1100
    },
    {
      "epoch": 1.37,
      "learning_rate": 0.0002,
      "loss": 0.4152,
      "step": 1110
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.0002,
      "loss": 0.425,
      "step": 1120
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.0002,
      "loss": 0.4537,
      "step": 1130
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.0002,
      "loss": 0.4496,
      "step": 1140
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.0002,
      "loss": 0.4266,
      "step": 1150
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.0002,
      "loss": 0.4449,
      "step": 1160
    },
    {
      "epoch": 1.44,
      "learning_rate": 0.0002,
      "loss": 0.4381,
      "step": 1170
    },
    {
      "epoch": 1.45,
      "learning_rate": 0.0002,
      "loss": 0.4272,
      "step": 1180
    },
    {
      "epoch": 1.47,
      "learning_rate": 0.0002,
      "loss": 0.4366,
      "step": 1190
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.0002,
      "loss": 0.428,
      "step": 1200
    },
    {
      "epoch": 1.48,
      "eval_loss": 0.4976211488246918,
      "eval_runtime": 249.5918,
      "eval_samples_per_second": 4.007,
      "eval_steps_per_second": 2.003,
      "step": 1200
    },
    {
      "epoch": 1.48,
      "mmlu_eval_accuracy": 0.4642275199494228,
      "mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.5,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
      "mmlu_eval_accuracy_college_physics": 0.5454545454545454,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.34615384615384615,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.4375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
      "mmlu_eval_accuracy_global_facts": 0.6,
      "mmlu_eval_accuracy_high_school_biology": 0.375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
      "mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444,
      "mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_geography": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256,
      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
      "mmlu_eval_accuracy_high_school_physics": 0.23529411764705882,
      "mmlu_eval_accuracy_high_school_psychology": 0.7833333333333333,
      "mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
      "mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273,
      "mmlu_eval_accuracy_high_school_world_history": 0.6153846153846154,
      "mmlu_eval_accuracy_human_aging": 0.7391304347826086,
      "mmlu_eval_accuracy_human_sexuality": 0.5,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.5454545454545454,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
      "mmlu_eval_accuracy_management": 0.6363636363636364,
      "mmlu_eval_accuracy_marketing": 0.76,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
      "mmlu_eval_accuracy_moral_disputes": 0.47368421052631576,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.48484848484848486,
      "mmlu_eval_accuracy_philosophy": 0.38235294117647056,
      "mmlu_eval_accuracy_prehistory": 0.5428571428571428,
      "mmlu_eval_accuracy_professional_accounting": 0.25806451612903225,
      "mmlu_eval_accuracy_professional_law": 0.34705882352941175,
      "mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
      "mmlu_eval_accuracy_professional_psychology": 0.34782608695652173,
      "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
      "mmlu_eval_accuracy_virology": 0.5,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.0431902918079503,
      "step": 1200
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.0002,
      "loss": 0.4388,
      "step": 1210
    },
    {
      "epoch": 1.5,
      "learning_rate": 0.0002,
      "loss": 0.4396,
      "step": 1220
    },
    {
      "epoch": 1.52,
      "learning_rate": 0.0002,
      "loss": 0.448,
      "step": 1230
    },
    {
      "epoch": 1.53,
      "learning_rate": 0.0002,
      "loss": 0.4294,
      "step": 1240
    },
    {
      "epoch": 1.54,
      "learning_rate": 0.0002,
      "loss": 0.427,
      "step": 1250
    },
    {
      "epoch": 1.55,
      "learning_rate": 0.0002,
      "loss": 0.4464,
      "step": 1260
    },
    {
      "epoch": 1.57,
      "learning_rate": 0.0002,
      "loss": 0.4319,
      "step": 1270
    },
    {
      "epoch": 1.58,
      "learning_rate": 0.0002,
      "loss": 0.4297,
      "step": 1280
    },
    {
      "epoch": 1.59,
      "learning_rate": 0.0002,
      "loss": 0.4266,
      "step": 1290
    },
    {
      "epoch": 1.6,
      "learning_rate": 0.0002,
      "loss": 0.4567,
      "step": 1300
    },
    {
      "epoch": 1.62,
      "learning_rate": 0.0002,
      "loss": 0.4496,
      "step": 1310
    },
    {
      "epoch": 1.63,
      "learning_rate": 0.0002,
      "loss": 0.4408,
      "step": 1320
    },
    {
      "epoch": 1.64,
      "learning_rate": 0.0002,
      "loss": 0.4487,
      "step": 1330
    },
    {
      "epoch": 1.65,
      "learning_rate": 0.0002,
      "loss": 0.4168,
      "step": 1340
    },
    {
      "epoch": 1.66,
      "learning_rate": 0.0002,
      "loss": 0.4418,
      "step": 1350
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.0002,
      "loss": 0.4288,
      "step": 1360
    },
    {
      "epoch": 1.69,
      "learning_rate": 0.0002,
      "loss": 0.4305,
      "step": 1370
    },
    {
      "epoch": 1.7,
      "learning_rate": 0.0002,
      "loss": 0.425,
      "step": 1380
    },
    {
      "epoch": 1.71,
      "learning_rate": 0.0002,
      "loss": 0.4247,
      "step": 1390
    },
    {
      "epoch": 1.73,
      "learning_rate": 0.0002,
      "loss": 0.4276,
      "step": 1400
    },
    {
      "epoch": 1.73,
      "eval_loss": 0.49078691005706787,
      "eval_runtime": 249.9521,
      "eval_samples_per_second": 4.001,
      "eval_steps_per_second": 2.0,
      "step": 1400
    },
    {
      "epoch": 1.73,
      "mmlu_eval_accuracy": 0.46579887678101295,
      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.3125,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586,
      "mmlu_eval_accuracy_college_biology": 0.5,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
      "mmlu_eval_accuracy_college_physics": 0.5454545454545454,
      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
      "mmlu_eval_accuracy_global_facts": 0.6,
      "mmlu_eval_accuracy_high_school_biology": 0.375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_geography": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.5,
      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
      "mmlu_eval_accuracy_high_school_psychology": 0.7833333333333333,
      "mmlu_eval_accuracy_high_school_statistics": 0.2608695652173913,
      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.5454545454545454,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.45454545454545453,
      "mmlu_eval_accuracy_marketing": 0.72,
      "mmlu_eval_accuracy_medical_genetics": 0.6363636363636364,
      "mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
      "mmlu_eval_accuracy_moral_disputes": 0.5,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.5454545454545454,
      "mmlu_eval_accuracy_philosophy": 0.38235294117647056,
      "mmlu_eval_accuracy_prehistory": 0.5714285714285714,
      "mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
      "mmlu_eval_accuracy_professional_law": 0.35294117647058826,
      "mmlu_eval_accuracy_professional_medicine": 0.4838709677419355,
      "mmlu_eval_accuracy_professional_psychology": 0.3188405797101449,
      "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.6818181818181818,
      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
      "mmlu_eval_accuracy_virology": 0.4444444444444444,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.1886581055839442,
      "step": 1400
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.0002,
      "loss": 0.4448,
      "step": 1410
    },
    {
      "epoch": 1.75,
      "learning_rate": 0.0002,
      "loss": 0.4305,
      "step": 1420
    },
    {
      "epoch": 1.76,
      "learning_rate": 0.0002,
      "loss": 0.4262,
      "step": 1430
    },
    {
      "epoch": 1.78,
      "learning_rate": 0.0002,
      "loss": 0.4274,
      "step": 1440
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.0002,
      "loss": 0.4375,
      "step": 1450
    },
    {
      "epoch": 1.8,
      "learning_rate": 0.0002,
      "loss": 0.4295,
      "step": 1460
    },
    {
      "epoch": 1.81,
      "learning_rate": 0.0002,
      "loss": 0.439,
      "step": 1470
    },
    {
      "epoch": 1.82,
      "learning_rate": 0.0002,
      "loss": 0.4182,
      "step": 1480
    },
    {
      "epoch": 1.84,
      "learning_rate": 0.0002,
      "loss": 0.4162,
      "step": 1490
    },
    {
      "epoch": 1.85,
      "learning_rate": 0.0002,
      "loss": 0.4348,
      "step": 1500
    },
    {
      "epoch": 1.86,
      "learning_rate": 0.0002,
      "loss": 0.4407,
      "step": 1510
    },
    {
      "epoch": 1.87,
      "learning_rate": 0.0002,
      "loss": 0.4213,
      "step": 1520
    },
    {
      "epoch": 1.89,
      "learning_rate": 0.0002,
      "loss": 0.4188,
      "step": 1530
    },
    {
      "epoch": 1.9,
      "learning_rate": 0.0002,
      "loss": 0.4591,
      "step": 1540
    },
    {
      "epoch": 1.91,
      "learning_rate": 0.0002,
      "loss": 0.4098,
      "step": 1550
    },
    {
      "epoch": 1.92,
      "learning_rate": 0.0002,
      "loss": 0.4331,
      "step": 1560
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.0002,
      "loss": 0.4383,
      "step": 1570
    },
    {
      "epoch": 1.95,
      "learning_rate": 0.0002,
      "loss": 0.4334,
      "step": 1580
    },
    {
      "epoch": 1.96,
      "learning_rate": 0.0002,
      "loss": 0.4363,
      "step": 1590
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.0002,
      "loss": 0.4227,
      "step": 1600
    },
    {
      "epoch": 1.97,
      "eval_loss": 0.48671162128448486,
      "eval_runtime": 249.6163,
      "eval_samples_per_second": 4.006,
      "eval_steps_per_second": 2.003,
      "step": 1600
    },
    {
      "epoch": 1.97,
      "mmlu_eval_accuracy": 0.4663407571795711,
      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.4375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
      "mmlu_eval_accuracy_college_medicine": 0.3181818181818182,
      "mmlu_eval_accuracy_college_physics": 0.5454545454545454,
      "mmlu_eval_accuracy_computer_security": 0.6363636363636364,
      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.4375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
      "mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444,
      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
      "mmlu_eval_accuracy_high_school_geography": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.37209302325581395,
      "mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
      "mmlu_eval_accuracy_high_school_psychology": 0.7166666666666667,
      "mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
      "mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273,
      "mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.7692307692307693,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.7272727272727273,
      "mmlu_eval_accuracy_marketing": 0.76,
      "mmlu_eval_accuracy_medical_genetics": 0.6363636363636364,
      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
      "mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
      "mmlu_eval_accuracy_moral_scenarios": 0.23,
      "mmlu_eval_accuracy_nutrition": 0.48484848484848486,
      "mmlu_eval_accuracy_philosophy": 0.3235294117647059,
      "mmlu_eval_accuracy_prehistory": 0.5428571428571428,
      "mmlu_eval_accuracy_professional_accounting": 0.3225806451612903,
      "mmlu_eval_accuracy_professional_law": 0.32941176470588235,
      "mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
      "mmlu_eval_accuracy_professional_psychology": 0.30434782608695654,
      "mmlu_eval_accuracy_public_relations": 0.4166666666666667,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.5909090909090909,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.6842105263157895,
      "mmlu_loss": 1.050787725118682,
      "step": 1600
    },
    {
      "epoch": 1.99,
      "learning_rate": 0.0002,
      "loss": 0.4297,
      "step": 1610
    },
    {
      "epoch": 2.0,
      "learning_rate": 0.0002,
      "loss": 0.4376,
      "step": 1620
    },
    {
      "epoch": 2.01,
      "learning_rate": 0.0002,
      "loss": 0.3567,
      "step": 1630
    },
    {
      "epoch": 2.02,
      "learning_rate": 0.0002,
      "loss": 0.332,
      "step": 1640
    },
    {
      "epoch": 2.03,
      "learning_rate": 0.0002,
      "loss": 0.3284,
      "step": 1650
    },
    {
      "epoch": 2.05,
      "learning_rate": 0.0002,
      "loss": 0.3347,
      "step": 1660
    },
    {
      "epoch": 2.06,
      "learning_rate": 0.0002,
      "loss": 0.3498,
      "step": 1670
    },
    {
      "epoch": 2.07,
      "learning_rate": 0.0002,
      "loss": 0.3541,
      "step": 1680
    },
    {
      "epoch": 2.08,
      "learning_rate": 0.0002,
      "loss": 0.34,
      "step": 1690
    },
    {
      "epoch": 2.1,
      "learning_rate": 0.0002,
      "loss": 0.3507,
      "step": 1700
    },
    {
      "epoch": 2.11,
      "learning_rate": 0.0002,
      "loss": 0.3518,
      "step": 1710
    },
    {
      "epoch": 2.12,
      "learning_rate": 0.0002,
      "loss": 0.3515,
      "step": 1720
    },
    {
      "epoch": 2.13,
      "learning_rate": 0.0002,
      "loss": 0.3432,
      "step": 1730
    },
    {
      "epoch": 2.15,
      "learning_rate": 0.0002,
      "loss": 0.355,
      "step": 1740
    },
    {
      "epoch": 2.16,
      "learning_rate": 0.0002,
      "loss": 0.3463,
      "step": 1750
    },
    {
      "epoch": 2.17,
      "learning_rate": 0.0002,
      "loss": 0.337,
      "step": 1760
    },
    {
      "epoch": 2.18,
      "learning_rate": 0.0002,
      "loss": 0.3524,
      "step": 1770
    },
    {
      "epoch": 2.19,
      "learning_rate": 0.0002,
      "loss": 0.3414,
      "step": 1780
    },
    {
      "epoch": 2.21,
      "learning_rate": 0.0002,
      "loss": 0.3505,
      "step": 1790
    },
    {
      "epoch": 2.22,
      "learning_rate": 0.0002,
      "loss": 0.3371,
      "step": 1800
    },
    {
      "epoch": 2.22,
      "eval_loss": 0.5045116543769836,
      "eval_runtime": 249.7171,
      "eval_samples_per_second": 4.005,
      "eval_steps_per_second": 2.002,
      "step": 1800
    },
    {
      "epoch": 2.22,
      "mmlu_eval_accuracy": 0.4648096685434819,
      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.18181818181818182,
      "mmlu_eval_accuracy_college_medicine": 0.2727272727272727,
      "mmlu_eval_accuracy_college_physics": 0.5454545454545454,
      "mmlu_eval_accuracy_computer_security": 0.5454545454545454,
      "mmlu_eval_accuracy_conceptual_physics": 0.46153846153846156,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.4375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.3902439024390244,
      "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
      "mmlu_eval_accuracy_global_facts": 0.6,
      "mmlu_eval_accuracy_high_school_biology": 0.40625,
      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
      "mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444,
      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
      "mmlu_eval_accuracy_high_school_geography": 0.7272727272727273,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.37209302325581395,
      "mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.5,
      "mmlu_eval_accuracy_high_school_physics": 0.23529411764705882,
      "mmlu_eval_accuracy_high_school_psychology": 0.75,
      "mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173,
      "mmlu_eval_accuracy_high_school_us_history": 0.8181818181818182,
      "mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769,
      "mmlu_eval_accuracy_human_aging": 0.7391304347826086,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.5454545454545454,
      "mmlu_eval_accuracy_marketing": 0.68,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
      "mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
      "mmlu_eval_accuracy_moral_scenarios": 0.23,
      "mmlu_eval_accuracy_nutrition": 0.5151515151515151,
      "mmlu_eval_accuracy_philosophy": 0.4117647058823529,
      "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
      "mmlu_eval_accuracy_professional_accounting": 0.22580645161290322,
      "mmlu_eval_accuracy_professional_law": 0.3588235294117647,
      "mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
      "mmlu_eval_accuracy_professional_psychology": 0.3333333333333333,
      "mmlu_eval_accuracy_public_relations": 0.3333333333333333,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
      "mmlu_eval_accuracy_virology": 0.4444444444444444,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.1527424410141167,
      "step": 1800
    },
    {
      "epoch": 2.23,
      "learning_rate": 0.0002,
      "loss": 0.334,
      "step": 1810
    },
    {
      "epoch": 2.24,
      "learning_rate": 0.0002,
      "loss": 0.3392,
      "step": 1820
    },
    {
      "epoch": 2.26,
      "learning_rate": 0.0002,
      "loss": 0.3377,
      "step": 1830
    },
    {
      "epoch": 2.27,
      "learning_rate": 0.0002,
      "loss": 0.3432,
      "step": 1840
    },
    {
      "epoch": 2.28,
      "learning_rate": 0.0002,
      "loss": 0.3581,
      "step": 1850
    },
    {
      "epoch": 2.29,
      "learning_rate": 0.0002,
      "loss": 0.3423,
      "step": 1860
    },
    {
      "epoch": 2.31,
      "learning_rate": 0.0002,
      "loss": 0.346,
      "step": 1870
    },
    {
      "epoch": 2.32,
      "learning_rate": 0.0002,
      "loss": 0.3361,
      "step": 1880
    },
    {
      "epoch": 2.33,
      "learning_rate": 0.0002,
      "loss": 0.3508,
      "step": 1890
    },
    {
      "epoch": 2.34,
      "learning_rate": 0.0002,
      "loss": 0.3502,
      "step": 1900
    },
    {
      "epoch": 2.36,
      "learning_rate": 0.0002,
      "loss": 0.3331,
      "step": 1910
    },
    {
      "epoch": 2.37,
      "learning_rate": 0.0002,
      "loss": 0.3504,
      "step": 1920
    },
    {
      "epoch": 2.38,
      "learning_rate": 0.0002,
      "loss": 0.3589,
      "step": 1930
    },
    {
      "epoch": 2.39,
      "learning_rate": 0.0002,
      "loss": 0.3477,
      "step": 1940
    },
    {
      "epoch": 2.4,
      "learning_rate": 0.0002,
      "loss": 0.3519,
      "step": 1950
    },
    {
      "epoch": 2.42,
      "learning_rate": 0.0002,
      "loss": 0.3549,
      "step": 1960
    },
    {
      "epoch": 2.43,
      "learning_rate": 0.0002,
      "loss": 0.3662,
      "step": 1970
    },
    {
      "epoch": 2.44,
      "learning_rate": 0.0002,
      "loss": 0.3611,
      "step": 1980
    },
    {
      "epoch": 2.45,
      "learning_rate": 0.0002,
      "loss": 0.3506,
      "step": 1990
    },
    {
      "epoch": 2.47,
      "learning_rate": 0.0002,
      "loss": 0.3577,
      "step": 2000
    },
    {
      "epoch": 2.47,
      "eval_loss": 0.5058008432388306,
      "eval_runtime": 249.914,
      "eval_samples_per_second": 4.001,
      "eval_steps_per_second": 2.001,
      "step": 2000
    },
    {
      "epoch": 2.47,
      "mmlu_eval_accuracy": 0.46507901610100333,
      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.3125,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.5,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
      "mmlu_eval_accuracy_college_physics": 0.5454545454545454,
      "mmlu_eval_accuracy_computer_security": 0.5454545454545454,
      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
      "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.40625,
      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
      "mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444,
      "mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_geography": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.3953488372093023,
      "mmlu_eval_accuracy_high_school_mathematics": 0.1724137931034483,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
      "mmlu_eval_accuracy_high_school_physics": 0.23529411764705882,
      "mmlu_eval_accuracy_high_school_psychology": 0.7,
      "mmlu_eval_accuracy_high_school_statistics": 0.30434782608695654,
      "mmlu_eval_accuracy_high_school_us_history": 0.5909090909090909,
      "mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.7692307692307693,
      "mmlu_eval_accuracy_jurisprudence": 0.5454545454545454,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.6363636363636364,
      "mmlu_eval_accuracy_marketing": 0.68,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6395348837209303,
      "mmlu_eval_accuracy_moral_disputes": 0.5,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.5151515151515151,
      "mmlu_eval_accuracy_philosophy": 0.35294117647058826,
      "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
      "mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
      "mmlu_eval_accuracy_professional_law": 0.32941176470588235,
      "mmlu_eval_accuracy_professional_medicine": 0.4838709677419355,
      "mmlu_eval_accuracy_professional_psychology": 0.3333333333333333,
      "mmlu_eval_accuracy_public_relations": 0.3333333333333333,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
      "mmlu_eval_accuracy_virology": 0.5555555555555556,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.1673369506087066,
      "step": 2000
    },
{ |
|
"epoch": 2.48, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3389, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3547, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3581, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3515, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3472, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3582, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3526, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3662, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3447, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3509, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3501, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3417, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 0.0002, |
|
"loss": 0.34, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3572, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3564, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 0.0002, |
|
"loss": 0.347, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 0.0002, |
|
"loss": 0.358, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3448, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3528, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3399, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"eval_loss": 0.49899813532829285, |
|
"eval_runtime": 250.308, |
|
"eval_samples_per_second": 3.995, |
|
"eval_steps_per_second": 1.998, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"mmlu_eval_accuracy": 0.4604361808337048, |
|
"mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365, |
|
"mmlu_eval_accuracy_anatomy": 0.5714285714285714, |
|
"mmlu_eval_accuracy_astronomy": 0.4375, |
|
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_clinical_knowledge": 0.3793103448275862, |
|
"mmlu_eval_accuracy_college_biology": 0.5, |
|
"mmlu_eval_accuracy_college_chemistry": 0.125, |
|
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, |
|
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_medicine": 0.2727272727272727, |
|
"mmlu_eval_accuracy_college_physics": 0.5454545454545454, |
|
"mmlu_eval_accuracy_computer_security": 0.6363636363636364, |
|
"mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231, |
|
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, |
|
"mmlu_eval_accuracy_electrical_engineering": 0.4375, |
|
"mmlu_eval_accuracy_elementary_mathematics": 0.43902439024390244, |
|
"mmlu_eval_accuracy_formal_logic": 0.14285714285714285, |
|
"mmlu_eval_accuracy_global_facts": 0.4, |
|
"mmlu_eval_accuracy_high_school_biology": 0.40625, |
|
"mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182, |
|
"mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556, |
|
"mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666, |
|
"mmlu_eval_accuracy_high_school_geography": 0.6818181818181818, |
|
"mmlu_eval_accuracy_high_school_government_and_politics": 0.5714285714285714, |
|
"mmlu_eval_accuracy_high_school_macroeconomics": 0.3488372093023256, |
|
"mmlu_eval_accuracy_high_school_mathematics": 0.13793103448275862, |
|
"mmlu_eval_accuracy_high_school_microeconomics": 0.5769230769230769, |
|
"mmlu_eval_accuracy_high_school_physics": 0.29411764705882354, |
|
"mmlu_eval_accuracy_high_school_psychology": 0.75, |
|
"mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173, |
|
"mmlu_eval_accuracy_high_school_us_history": 0.7727272727272727, |
|
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, |
|
"mmlu_eval_accuracy_human_aging": 0.6521739130434783, |
|
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667, |
|
"mmlu_eval_accuracy_international_law": 0.6923076923076923, |
|
"mmlu_eval_accuracy_jurisprudence": 0.45454545454545453, |
|
"mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556, |
|
"mmlu_eval_accuracy_machine_learning": 0.2727272727272727, |
|
"mmlu_eval_accuracy_management": 0.5454545454545454, |
|
"mmlu_eval_accuracy_marketing": 0.68, |
|
"mmlu_eval_accuracy_medical_genetics": 0.6363636363636364, |
|
"mmlu_eval_accuracy_miscellaneous": 0.6511627906976745, |
|
"mmlu_eval_accuracy_moral_disputes": 0.5263157894736842, |
|
"mmlu_eval_accuracy_moral_scenarios": 0.24, |
|
"mmlu_eval_accuracy_nutrition": 0.45454545454545453, |
|
"mmlu_eval_accuracy_philosophy": 0.38235294117647056, |
|
"mmlu_eval_accuracy_prehistory": 0.5428571428571428, |
|
"mmlu_eval_accuracy_professional_accounting": 0.2903225806451613, |
|
"mmlu_eval_accuracy_professional_law": 0.35294117647058826, |
|
"mmlu_eval_accuracy_professional_medicine": 0.4838709677419355, |
|
"mmlu_eval_accuracy_professional_psychology": 0.3188405797101449, |
|
"mmlu_eval_accuracy_public_relations": 0.3333333333333333, |
|
"mmlu_eval_accuracy_security_studies": 0.5185185185185185, |
|
"mmlu_eval_accuracy_sociology": 0.6363636363636364, |
|
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, |
|
"mmlu_eval_accuracy_virology": 0.3888888888888889, |
|
"mmlu_eval_accuracy_world_religions": 0.6842105263157895, |
|
"mmlu_loss": 0.9543326205425645, |
|
"step": 2200 |
|
} |
|
], |
|
"max_steps": 5000, |
|
"num_train_epochs": 7, |
|
"total_flos": 5.97973549929726e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|