{
  "best_metric": 0.8286852589641435,
  "best_model_checkpoint": "distilbert-base-multilingual-cased-hyper-matt/run-ul9reh0d/checkpoint-1600",
  "epoch": 4.0,
  "eval_steps": 500,
  "global_step": 1600,
  "is_hyper_param_search": true,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.025,
      "grad_norm": 3.052316665649414,
      "learning_rate": 6.753956614871026e-05,
      "loss": 0.5937,
      "step": 10
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.5294617414474487,
      "learning_rate": 6.720017134394288e-05,
      "loss": 0.5648,
      "step": 20
    },
    {
      "epoch": 0.075,
      "grad_norm": 2.700227737426758,
      "learning_rate": 6.686077653917548e-05,
      "loss": 0.6581,
      "step": 30
    },
    {
      "epoch": 0.1,
      "grad_norm": 12.69797134399414,
      "learning_rate": 6.65213817344081e-05,
      "loss": 0.5975,
      "step": 40
    },
    {
      "epoch": 0.125,
      "grad_norm": 10.163844108581543,
      "learning_rate": 6.618198692964071e-05,
      "loss": 0.3418,
      "step": 50
    },
    {
      "epoch": 0.15,
      "grad_norm": 23.86635971069336,
      "learning_rate": 6.584259212487333e-05,
      "loss": 0.7951,
      "step": 60
    },
    {
      "epoch": 0.175,
      "grad_norm": 5.886571884155273,
      "learning_rate": 6.550319732010593e-05,
      "loss": 0.7399,
      "step": 70
    },
    {
      "epoch": 0.2,
      "grad_norm": 18.702909469604492,
      "learning_rate": 6.516380251533855e-05,
      "loss": 0.4083,
      "step": 80
    },
    {
      "epoch": 0.225,
      "grad_norm": 6.397231101989746,
      "learning_rate": 6.482440771057116e-05,
      "loss": 0.6293,
      "step": 90
    },
    {
      "epoch": 0.25,
      "grad_norm": 39.70441436767578,
      "learning_rate": 6.448501290580378e-05,
      "loss": 0.6267,
      "step": 100
    },
    {
      "epoch": 0.275,
      "grad_norm": 1.6805046796798706,
      "learning_rate": 6.414561810103638e-05,
      "loss": 0.5261,
      "step": 110
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.5493907928466797,
      "learning_rate": 6.380622329626899e-05,
      "loss": 0.2222,
      "step": 120
    },
    {
      "epoch": 0.325,
      "grad_norm": 5.368661403656006,
      "learning_rate": 6.346682849150162e-05,
      "loss": 0.5268,
      "step": 130
    },
    {
      "epoch": 0.35,
      "grad_norm": 4.198462963104248,
      "learning_rate": 6.312743368673422e-05,
      "loss": 0.5109,
      "step": 140
    },
    {
      "epoch": 0.375,
      "grad_norm": 6.481055736541748,
      "learning_rate": 6.278803888196683e-05,
      "loss": 0.3406,
      "step": 150
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.7471745014190674,
      "learning_rate": 6.244864407719945e-05,
      "loss": 0.6461,
      "step": 160
    },
    {
      "epoch": 0.425,
      "grad_norm": 16.66990089416504,
      "learning_rate": 6.210924927243205e-05,
      "loss": 0.4364,
      "step": 170
    },
    {
      "epoch": 0.45,
      "grad_norm": 7.193353176116943,
      "learning_rate": 6.176985446766467e-05,
      "loss": 0.3659,
      "step": 180
    },
    {
      "epoch": 0.475,
      "grad_norm": 8.753779411315918,
      "learning_rate": 6.143045966289728e-05,
      "loss": 0.5441,
      "step": 190
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.7563661336898804,
      "learning_rate": 6.10910648581299e-05,
      "loss": 0.4186,
      "step": 200
    },
    {
      "epoch": 0.525,
      "grad_norm": 27.28961753845215,
      "learning_rate": 6.07516700533625e-05,
      "loss": 0.394,
      "step": 210
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.9820290803909302,
      "learning_rate": 6.041227524859511e-05,
      "loss": 0.9692,
      "step": 220
    },
    {
      "epoch": 0.575,
      "grad_norm": 0.795172393321991,
      "learning_rate": 6.0072880443827725e-05,
      "loss": 0.4171,
      "step": 230
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.29914233088493347,
      "learning_rate": 5.973348563906034e-05,
      "loss": 0.467,
      "step": 240
    },
    {
      "epoch": 0.625,
      "grad_norm": 8.065807342529297,
      "learning_rate": 5.939409083429295e-05,
      "loss": 1.0716,
      "step": 250
    },
    {
      "epoch": 0.65,
      "grad_norm": 3.9354641437530518,
      "learning_rate": 5.905469602952556e-05,
      "loss": 0.3503,
      "step": 260
    },
    {
      "epoch": 0.675,
      "grad_norm": 0.45260441303253174,
      "learning_rate": 5.871530122475817e-05,
      "loss": 0.7493,
      "step": 270
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.5366302728652954,
      "learning_rate": 5.8375906419990785e-05,
      "loss": 0.346,
      "step": 280
    },
    {
      "epoch": 0.725,
      "grad_norm": 8.400691032409668,
      "learning_rate": 5.80365116152234e-05,
      "loss": 0.3399,
      "step": 290
    },
    {
      "epoch": 0.75,
      "grad_norm": 7.528047561645508,
      "learning_rate": 5.769711681045601e-05,
      "loss": 0.5184,
      "step": 300
    },
    {
      "epoch": 0.775,
      "grad_norm": 0.5284883975982666,
      "learning_rate": 5.7357722005688614e-05,
      "loss": 0.3518,
      "step": 310
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.5432015657424927,
      "learning_rate": 5.7018327200921226e-05,
      "loss": 0.4533,
      "step": 320
    },
    {
      "epoch": 0.825,
      "grad_norm": 8.873420715332031,
      "learning_rate": 5.667893239615384e-05,
      "loss": 0.6322,
      "step": 330
    },
    {
      "epoch": 0.85,
      "grad_norm": 15.452073097229004,
      "learning_rate": 5.633953759138645e-05,
      "loss": 0.5022,
      "step": 340
    },
    {
      "epoch": 0.875,
      "grad_norm": 5.867374897003174,
      "learning_rate": 5.600014278661906e-05,
      "loss": 0.2418,
      "step": 350
    },
    {
      "epoch": 0.9,
      "grad_norm": 24.693492889404297,
      "learning_rate": 5.5660747981851674e-05,
      "loss": 0.5568,
      "step": 360
    },
    {
      "epoch": 0.925,
      "grad_norm": 2.5772554874420166,
      "learning_rate": 5.5321353177084286e-05,
      "loss": 0.5199,
      "step": 370
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.09758460521698,
      "learning_rate": 5.4981958372316905e-05,
      "loss": 0.4895,
      "step": 380
    },
    {
      "epoch": 0.975,
      "grad_norm": 0.9194037914276123,
      "learning_rate": 5.464256356754952e-05,
      "loss": 0.5017,
      "step": 390
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.7973395586013794,
      "learning_rate": 5.430316876278213e-05,
      "loss": 0.316,
      "step": 400
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8425,
      "eval_f1": 0.7407407407407407,
      "eval_loss": 0.4195494055747986,
      "eval_precision": 0.75,
      "eval_recall": 0.7317073170731707,
      "eval_runtime": 1.5136,
      "eval_samples_per_second": 264.267,
      "eval_steps_per_second": 16.517,
      "step": 400
    },
    {
      "epoch": 1.025,
      "grad_norm": 0.25087594985961914,
      "learning_rate": 5.396377395801474e-05,
      "loss": 0.2648,
      "step": 410
    },
    {
      "epoch": 1.05,
      "grad_norm": 5.209116458892822,
      "learning_rate": 5.362437915324735e-05,
      "loss": 0.4875,
      "step": 420
    },
    {
      "epoch": 1.075,
      "grad_norm": 0.28788337111473083,
      "learning_rate": 5.3284984348479965e-05,
      "loss": 0.5706,
      "step": 430
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.34227511286735535,
      "learning_rate": 5.2945589543712577e-05,
      "loss": 0.2126,
      "step": 440
    },
    {
      "epoch": 1.125,
      "grad_norm": 9.716309547424316,
      "learning_rate": 5.260619473894518e-05,
      "loss": 0.489,
      "step": 450
    },
    {
      "epoch": 1.15,
      "grad_norm": 10.545299530029297,
      "learning_rate": 5.2266799934177794e-05,
      "loss": 0.4637,
      "step": 460
    },
    {
      "epoch": 1.175,
      "grad_norm": 0.4732460677623749,
      "learning_rate": 5.1927405129410406e-05,
      "loss": 0.4207,
      "step": 470
    },
    {
      "epoch": 1.2,
      "grad_norm": 7.716385841369629,
      "learning_rate": 5.158801032464302e-05,
      "loss": 0.4326,
      "step": 480
    },
    {
      "epoch": 1.225,
      "grad_norm": 4.098447799682617,
      "learning_rate": 5.124861551987563e-05,
      "loss": 0.3346,
      "step": 490
    },
    {
      "epoch": 1.25,
      "grad_norm": 15.903754234313965,
      "learning_rate": 5.090922071510824e-05,
      "loss": 0.5305,
      "step": 500
    },
    {
      "epoch": 1.275,
      "grad_norm": 0.19262875616550446,
      "learning_rate": 5.0569825910340854e-05,
      "loss": 0.2946,
      "step": 510
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.18385837972164154,
      "learning_rate": 5.0230431105573466e-05,
      "loss": 0.2572,
      "step": 520
    },
    {
      "epoch": 1.325,
      "grad_norm": 0.1092706024646759,
      "learning_rate": 4.989103630080608e-05,
      "loss": 0.1818,
      "step": 530
    },
    {
      "epoch": 1.35,
      "grad_norm": 0.2516274154186249,
      "learning_rate": 4.955164149603869e-05,
      "loss": 0.4868,
      "step": 540
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.19831004738807678,
      "learning_rate": 4.92122466912713e-05,
      "loss": 0.268,
      "step": 550
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.16449308395385742,
      "learning_rate": 4.887285188650391e-05,
      "loss": 0.1882,
      "step": 560
    },
    {
      "epoch": 1.425,
      "grad_norm": 8.629518508911133,
      "learning_rate": 4.853345708173652e-05,
      "loss": 0.6056,
      "step": 570
    },
    {
      "epoch": 1.45,
      "grad_norm": 2.007662057876587,
      "learning_rate": 4.819406227696913e-05,
      "loss": 0.232,
      "step": 580
    },
    {
      "epoch": 1.475,
      "grad_norm": 0.3099694550037384,
      "learning_rate": 4.785466747220174e-05,
      "loss": 0.2788,
      "step": 590
    },
    {
      "epoch": 1.5,
      "grad_norm": 40.103694915771484,
      "learning_rate": 4.7515272667434355e-05,
      "loss": 0.1362,
      "step": 600
    },
    {
      "epoch": 1.525,
      "grad_norm": 31.98729133605957,
      "learning_rate": 4.717587786266697e-05,
      "loss": 0.5487,
      "step": 610
    },
    {
      "epoch": 1.55,
      "grad_norm": 2.4679489135742188,
      "learning_rate": 4.683648305789958e-05,
      "loss": 0.5429,
      "step": 620
    },
    {
      "epoch": 1.575,
      "grad_norm": 24.133512496948242,
      "learning_rate": 4.64970882531322e-05,
      "loss": 0.4373,
      "step": 630
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.29410362243652344,
      "learning_rate": 4.615769344836481e-05,
      "loss": 0.4658,
      "step": 640
    },
    {
      "epoch": 1.625,
      "grad_norm": 3.7344865798950195,
      "learning_rate": 4.581829864359742e-05,
      "loss": 0.0568,
      "step": 650
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.154106006026268,
      "learning_rate": 4.5478903838830034e-05,
      "loss": 0.1266,
      "step": 660
    },
    {
      "epoch": 1.675,
      "grad_norm": 62.301597595214844,
      "learning_rate": 4.5139509034062646e-05,
      "loss": 0.3428,
      "step": 670
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.08602792769670486,
      "learning_rate": 4.480011422929526e-05,
      "loss": 0.641,
      "step": 680
    },
    {
      "epoch": 1.725,
      "grad_norm": 0.13891366124153137,
      "learning_rate": 4.446071942452787e-05,
      "loss": 0.0499,
      "step": 690
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.22815102338790894,
      "learning_rate": 4.4121324619760475e-05,
      "loss": 0.8891,
      "step": 700
    },
    {
      "epoch": 1.775,
      "grad_norm": 0.284967839717865,
      "learning_rate": 4.378192981499309e-05,
      "loss": 0.5238,
      "step": 710
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.2135128527879715,
      "learning_rate": 4.34425350102257e-05,
      "loss": 0.4598,
      "step": 720
    },
    {
      "epoch": 1.825,
      "grad_norm": 0.2596323788166046,
      "learning_rate": 4.310314020545831e-05,
      "loss": 0.2411,
      "step": 730
    },
    {
      "epoch": 1.85,
      "grad_norm": 0.12039212137460709,
      "learning_rate": 4.276374540069092e-05,
      "loss": 0.5047,
      "step": 740
    },
    {
      "epoch": 1.875,
      "grad_norm": 7.840117454528809,
      "learning_rate": 4.2424350595923535e-05,
      "loss": 0.2763,
      "step": 750
    },
    {
      "epoch": 1.9,
      "grad_norm": 29.95262908935547,
      "learning_rate": 4.208495579115615e-05,
      "loss": 0.3228,
      "step": 760
    },
    {
      "epoch": 1.925,
      "grad_norm": 0.21045206487178802,
      "learning_rate": 4.174556098638876e-05,
      "loss": 0.1931,
      "step": 770
    },
    {
      "epoch": 1.95,
      "grad_norm": 0.31024569272994995,
      "learning_rate": 4.140616618162137e-05,
      "loss": 0.463,
      "step": 780
    },
    {
      "epoch": 1.975,
      "grad_norm": 0.5587632656097412,
      "learning_rate": 4.106677137685398e-05,
      "loss": 0.1264,
      "step": 790
    },
    {
      "epoch": 2.0,
      "grad_norm": 6.912115573883057,
      "learning_rate": 4.0727376572086595e-05,
      "loss": 0.4146,
      "step": 800
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.875,
      "eval_f1": 0.8106060606060606,
      "eval_loss": 0.48966696858406067,
      "eval_precision": 0.7588652482269503,
      "eval_recall": 0.8699186991869918,
      "eval_runtime": 1.5322,
      "eval_samples_per_second": 261.066,
      "eval_steps_per_second": 16.317,
      "step": 800
    },
    {
      "epoch": 2.025,
      "grad_norm": 9.648056030273438,
      "learning_rate": 4.03879817673192e-05,
      "loss": 0.1043,
      "step": 810
    },
    {
      "epoch": 2.05,
      "grad_norm": 0.14807315170764923,
      "learning_rate": 4.004858696255181e-05,
      "loss": 0.1021,
      "step": 820
    },
    {
      "epoch": 2.075,
      "grad_norm": 1.4937807321548462,
      "learning_rate": 3.9709192157784424e-05,
      "loss": 0.4885,
      "step": 830
    },
    {
      "epoch": 2.1,
      "grad_norm": 0.08521637320518494,
      "learning_rate": 3.9369797353017036e-05,
      "loss": 0.0906,
      "step": 840
    },
    {
      "epoch": 2.125,
      "grad_norm": 0.1001555472612381,
      "learning_rate": 3.903040254824965e-05,
      "loss": 0.2574,
      "step": 850
    },
    {
      "epoch": 2.15,
      "grad_norm": 0.04532875493168831,
      "learning_rate": 3.869100774348226e-05,
      "loss": 0.1966,
      "step": 860
    },
    {
      "epoch": 2.175,
      "grad_norm": 0.048231206834316254,
      "learning_rate": 3.835161293871487e-05,
      "loss": 0.2154,
      "step": 870
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.1648254096508026,
      "learning_rate": 3.801221813394749e-05,
      "loss": 0.673,
      "step": 880
    },
    {
      "epoch": 2.225,
      "grad_norm": 0.1614377647638321,
      "learning_rate": 3.76728233291801e-05,
      "loss": 0.1303,
      "step": 890
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.12167342752218246,
      "learning_rate": 3.7333428524412715e-05,
      "loss": 0.0674,
      "step": 900
    },
    {
      "epoch": 2.275,
      "grad_norm": 142.9597930908203,
      "learning_rate": 3.699403371964533e-05,
      "loss": 0.5005,
      "step": 910
    },
    {
      "epoch": 2.3,
      "grad_norm": 0.40126386284828186,
      "learning_rate": 3.665463891487794e-05,
      "loss": 0.0134,
      "step": 920
    },
    {
      "epoch": 2.325,
      "grad_norm": 0.027333322912454605,
      "learning_rate": 3.631524411011055e-05,
      "loss": 0.2041,
      "step": 930
    },
    {
      "epoch": 2.35,
      "grad_norm": 98.71678924560547,
      "learning_rate": 3.597584930534316e-05,
      "loss": 0.0721,
      "step": 940
    },
    {
      "epoch": 2.375,
      "grad_norm": 0.028235169127583504,
      "learning_rate": 3.563645450057577e-05,
      "loss": 0.7024,
      "step": 950
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.030322661623358727,
      "learning_rate": 3.529705969580838e-05,
      "loss": 0.0277,
      "step": 960
    },
    {
      "epoch": 2.425,
      "grad_norm": 15.570178985595703,
      "learning_rate": 3.495766489104099e-05,
      "loss": 0.3353,
      "step": 970
    },
    {
      "epoch": 2.45,
      "grad_norm": 0.08365435898303986,
      "learning_rate": 3.4618270086273604e-05,
      "loss": 0.1971,
      "step": 980
    },
    {
      "epoch": 2.475,
      "grad_norm": 0.02198941260576248,
      "learning_rate": 3.4278875281506216e-05,
      "loss": 0.3403,
      "step": 990
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.22028592228889465,
      "learning_rate": 3.393948047673883e-05,
      "loss": 0.1051,
      "step": 1000
    },
    {
      "epoch": 2.525,
      "grad_norm": 0.18908417224884033,
      "learning_rate": 3.360008567197144e-05,
      "loss": 0.2455,
      "step": 1010
    },
    {
      "epoch": 2.55,
      "grad_norm": 0.04513182491064072,
      "learning_rate": 3.326069086720405e-05,
      "loss": 0.1396,
      "step": 1020
    },
    {
      "epoch": 2.575,
      "grad_norm": 0.10827507078647614,
      "learning_rate": 3.2921296062436664e-05,
      "loss": 0.2477,
      "step": 1030
    },
    {
      "epoch": 2.6,
      "grad_norm": 0.41028207540512085,
      "learning_rate": 3.2581901257669276e-05,
      "loss": 0.1829,
      "step": 1040
    },
    {
      "epoch": 2.625,
      "grad_norm": 0.08836387097835541,
      "learning_rate": 3.224250645290189e-05,
      "loss": 0.2214,
      "step": 1050
    },
    {
      "epoch": 2.65,
      "grad_norm": 0.21761733293533325,
      "learning_rate": 3.190311164813449e-05,
      "loss": 0.2421,
      "step": 1060
    },
    {
      "epoch": 2.675,
      "grad_norm": 0.10760723054409027,
      "learning_rate": 3.156371684336711e-05,
      "loss": 0.0054,
      "step": 1070
    },
    {
      "epoch": 2.7,
      "grad_norm": 0.05878245085477829,
      "learning_rate": 3.1224322038599724e-05,
      "loss": 0.2122,
      "step": 1080
    },
    {
      "epoch": 2.725,
      "grad_norm": 0.18703211843967438,
      "learning_rate": 3.0884927233832336e-05,
      "loss": 0.1254,
      "step": 1090
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.011866235174238682,
      "learning_rate": 3.054553242906495e-05,
      "loss": 0.1307,
      "step": 1100
    },
    {
      "epoch": 2.775,
      "grad_norm": 0.04749550670385361,
      "learning_rate": 3.0206137624297556e-05,
      "loss": 0.1111,
      "step": 1110
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.06841249763965607,
      "learning_rate": 2.986674281953017e-05,
      "loss": 0.1828,
      "step": 1120
    },
    {
      "epoch": 2.825,
      "grad_norm": 0.020818322896957397,
      "learning_rate": 2.952734801476278e-05,
      "loss": 0.1772,
      "step": 1130
    },
    {
      "epoch": 2.85,
      "grad_norm": 0.1497318297624588,
      "learning_rate": 2.9187953209995392e-05,
      "loss": 0.5667,
      "step": 1140
    },
    {
      "epoch": 2.875,
      "grad_norm": 0.028121085837483406,
      "learning_rate": 2.8848558405228004e-05,
      "loss": 0.1637,
      "step": 1150
    },
    {
      "epoch": 2.9,
      "grad_norm": 0.15947194397449493,
      "learning_rate": 2.8509163600460613e-05,
      "loss": 0.1511,
      "step": 1160
    },
    {
      "epoch": 2.925,
      "grad_norm": 0.13598938286304474,
      "learning_rate": 2.8169768795693225e-05,
      "loss": 0.4279,
      "step": 1170
    },
    {
      "epoch": 2.95,
      "grad_norm": 0.09932050853967667,
      "learning_rate": 2.7830373990925837e-05,
      "loss": 0.2489,
      "step": 1180
    },
    {
      "epoch": 2.975,
      "grad_norm": 0.08448607474565506,
      "learning_rate": 2.7490979186158452e-05,
      "loss": 0.4415,
      "step": 1190
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.4404872953891754,
      "learning_rate": 2.7151584381391064e-05,
      "loss": 0.0094,
      "step": 1200
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.88,
      "eval_f1": 0.7966101694915254,
      "eval_loss": 0.543876051902771,
      "eval_precision": 0.831858407079646,
      "eval_recall": 0.7642276422764228,
      "eval_runtime": 1.5751,
      "eval_samples_per_second": 253.947,
      "eval_steps_per_second": 15.872,
      "step": 1200
    },
    {
      "epoch": 3.025,
      "grad_norm": 0.08914393186569214,
      "learning_rate": 2.6812189576623676e-05,
      "loss": 0.1131,
      "step": 1210
    },
    {
      "epoch": 3.05,
      "grad_norm": 0.07447825372219086,
      "learning_rate": 2.6472794771856288e-05,
      "loss": 0.0121,
      "step": 1220
    },
    {
      "epoch": 3.075,
      "grad_norm": 0.03912358358502388,
      "learning_rate": 2.6133399967088897e-05,
      "loss": 0.1766,
      "step": 1230
    },
    {
      "epoch": 3.1,
      "grad_norm": 13.849862098693848,
      "learning_rate": 2.579400516232151e-05,
      "loss": 0.2275,
      "step": 1240
    },
    {
      "epoch": 3.125,
      "grad_norm": 0.2011016458272934,
      "learning_rate": 2.545461035755412e-05,
      "loss": 0.2427,
      "step": 1250
    },
    {
      "epoch": 3.15,
      "grad_norm": 0.08804444223642349,
      "learning_rate": 2.5115215552786733e-05,
      "loss": 0.155,
      "step": 1260
    },
    {
      "epoch": 3.175,
      "grad_norm": 0.045624420046806335,
      "learning_rate": 2.4775820748019345e-05,
      "loss": 0.2603,
      "step": 1270
    },
    {
      "epoch": 3.2,
      "grad_norm": 0.10040140151977539,
      "learning_rate": 2.4436425943251953e-05,
      "loss": 0.0033,
      "step": 1280
    },
    {
      "epoch": 3.225,
      "grad_norm": 0.05396701395511627,
      "learning_rate": 2.4097031138484565e-05,
      "loss": 0.0031,
      "step": 1290
    },
    {
      "epoch": 3.25,
      "grad_norm": 0.06577543169260025,
      "learning_rate": 2.3757636333717177e-05,
      "loss": 0.1364,
      "step": 1300
    },
    {
      "epoch": 3.275,
      "grad_norm": 0.05943725258111954,
      "learning_rate": 2.341824152894979e-05,
      "loss": 0.2291,
      "step": 1310
    },
    {
      "epoch": 3.3,
      "grad_norm": 37.863914489746094,
      "learning_rate": 2.3078846724182405e-05,
      "loss": 0.2501,
      "step": 1320
    },
    {
      "epoch": 3.325,
      "grad_norm": 0.20205795764923096,
      "learning_rate": 2.2739451919415017e-05,
      "loss": 0.0971,
      "step": 1330
    },
    {
      "epoch": 3.35,
      "grad_norm": 0.11639241874217987,
      "learning_rate": 2.240005711464763e-05,
      "loss": 0.2033,
      "step": 1340
    },
    {
      "epoch": 3.375,
      "grad_norm": 0.06607875972986221,
      "learning_rate": 2.2060662309880237e-05,
      "loss": 0.0093,
      "step": 1350
    },
    {
      "epoch": 3.4,
      "grad_norm": 0.21797184646129608,
      "learning_rate": 2.172126750511285e-05,
      "loss": 0.1164,
      "step": 1360
    },
    {
      "epoch": 3.425,
      "grad_norm": 0.0903061032295227,
      "learning_rate": 2.138187270034546e-05,
      "loss": 0.0025,
      "step": 1370
    },
    {
      "epoch": 3.45,
      "grad_norm": 0.02832476980984211,
      "learning_rate": 2.1042477895578073e-05,
      "loss": 0.2609,
      "step": 1380
    },
    {
      "epoch": 3.475,
      "grad_norm": 0.0343223437666893,
      "learning_rate": 2.0703083090810685e-05,
      "loss": 0.0021,
      "step": 1390
    },
    {
      "epoch": 3.5,
      "grad_norm": 0.020103761926293373,
      "learning_rate": 2.0363688286043297e-05,
      "loss": 0.0046,
      "step": 1400
    },
    {
      "epoch": 3.525,
      "grad_norm": 0.13800527155399323,
      "learning_rate": 2.0024293481275906e-05,
      "loss": 0.3184,
      "step": 1410
    },
    {
      "epoch": 3.55,
      "grad_norm": 0.024840140715241432,
      "learning_rate": 1.9684898676508518e-05,
      "loss": 0.2926,
      "step": 1420
    },
    {
      "epoch": 3.575,
      "grad_norm": 0.030249882489442825,
      "learning_rate": 1.934550387174113e-05,
      "loss": 0.0027,
      "step": 1430
    },
    {
      "epoch": 3.6,
      "grad_norm": 0.019687151536345482,
      "learning_rate": 1.9006109066973745e-05,
      "loss": 0.0026,
      "step": 1440
    },
    {
      "epoch": 3.625,
      "grad_norm": 0.017106233164668083,
      "learning_rate": 1.8666714262206357e-05,
      "loss": 0.0023,
      "step": 1450
    },
    {
      "epoch": 3.65,
      "grad_norm": 0.01799299567937851,
      "learning_rate": 1.832731945743897e-05,
      "loss": 0.0019,
      "step": 1460
    },
    {
      "epoch": 3.675,
      "grad_norm": 0.024008911103010178,
      "learning_rate": 1.798792465267158e-05,
      "loss": 0.0532,
      "step": 1470
    },
    {
      "epoch": 3.7,
      "grad_norm": 0.1859898418188095,
      "learning_rate": 1.764852984790419e-05,
      "loss": 0.1062,
      "step": 1480
    },
    {
      "epoch": 3.725,
      "grad_norm": 13.769889831542969,
      "learning_rate": 1.7309135043136802e-05,
      "loss": 0.1398,
      "step": 1490
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.015940789133310318,
      "learning_rate": 1.6969740238369414e-05,
      "loss": 0.0017,
      "step": 1500
    },
    {
      "epoch": 3.775,
      "grad_norm": 0.37872692942619324,
      "learning_rate": 1.6630345433602026e-05,
      "loss": 0.105,
      "step": 1510
    },
    {
      "epoch": 3.8,
      "grad_norm": 0.020107530057430267,
      "learning_rate": 1.6290950628834638e-05,
      "loss": 0.1373,
      "step": 1520
    },
    {
      "epoch": 3.825,
      "grad_norm": 0.23860657215118408,
      "learning_rate": 1.5951555824067247e-05,
      "loss": 0.0012,
      "step": 1530
    },
    {
      "epoch": 3.85,
      "grad_norm": 0.017826450988650322,
      "learning_rate": 1.5612161019299862e-05,
      "loss": 0.0029,
      "step": 1540
    },
    {
      "epoch": 3.875,
      "grad_norm": 0.18682965636253357,
      "learning_rate": 1.5272766214532474e-05,
      "loss": 0.3438,
      "step": 1550
    },
    {
      "epoch": 3.9,
      "grad_norm": 0.03294467553496361,
      "learning_rate": 1.4933371409765084e-05,
      "loss": 0.1961,
      "step": 1560
    },
    {
      "epoch": 3.925,
      "grad_norm": 0.01622551679611206,
      "learning_rate": 1.4593976604997696e-05,
      "loss": 0.0016,
      "step": 1570
    },
    {
      "epoch": 3.95,
      "grad_norm": 0.24873243272304535,
      "learning_rate": 1.4254581800230306e-05,
      "loss": 0.0017,
      "step": 1580
    },
    {
      "epoch": 3.975,
      "grad_norm": 0.5810887813568115,
      "learning_rate": 1.3915186995462918e-05,
      "loss": 0.1409,
      "step": 1590
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.049918390810489655,
      "learning_rate": 1.3575792190695532e-05,
      "loss": 0.0492,
      "step": 1600
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.8925,
      "eval_f1": 0.8286852589641435,
      "eval_loss": 0.6152504682540894,
      "eval_precision": 0.8125,
      "eval_recall": 0.8455284552845529,
      "eval_runtime": 1.5277,
      "eval_samples_per_second": 261.825,
      "eval_steps_per_second": 16.364,
      "step": 1600
    }
  ],
  "logging_steps": 10,
  "max_steps": 2000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 847261481803776.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": {
    "_wandb": {},
    "assignments": {},
    "learning_rate": 6.787896095347766e-05,
    "metric": "eval/loss",
    "num_train_epochs": 5,
    "per_device_train_batch_size": 4,
    "seed": 39
  }
}