{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.0, |
|
"eval_steps": 693, |
|
"global_step": 1110, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0, |
|
"eval_f1": 0.14772727272727273, |
|
"eval_loss": 0.8850772380828857, |
|
"eval_precision": 0.08080808080808081, |
|
"eval_recall": 0.859504132231405, |
|
"eval_runtime": 99.3343, |
|
"eval_samples_per_second": 47.043, |
|
"eval_steps_per_second": 5.889, |
|
"step": 0 |
|
}, |
|
{ |
|
"epoch": 0.018026137899954935, |
|
"grad_norm": 6.000438690185547, |
|
"learning_rate": 4.981949458483755e-05, |
|
"loss": 0.3931, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03605227579990987, |
|
"grad_norm": 47.497467041015625, |
|
"learning_rate": 4.963898916967509e-05, |
|
"loss": 0.2607, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.054078413699864804, |
|
"grad_norm": 13.464141845703125, |
|
"learning_rate": 4.945848375451264e-05, |
|
"loss": 0.1813, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.07210455159981974, |
|
"grad_norm": 15.57222843170166, |
|
"learning_rate": 4.927797833935018e-05, |
|
"loss": 0.2618, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.09013068949977468, |
|
"grad_norm": 11.892234802246094, |
|
"learning_rate": 4.909747292418773e-05, |
|
"loss": 0.1559, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.10815682739972961, |
|
"grad_norm": 6.340595722198486, |
|
"learning_rate": 4.891696750902527e-05, |
|
"loss": 0.1648, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.12618296529968454, |
|
"grad_norm": 10.196683883666992, |
|
"learning_rate": 4.873646209386282e-05, |
|
"loss": 0.1472, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.14420910319963948, |
|
"grad_norm": 11.511515617370605, |
|
"learning_rate": 4.855595667870036e-05, |
|
"loss": 0.2108, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.16223524109959442, |
|
"grad_norm": 8.681073188781738, |
|
"learning_rate": 4.837545126353791e-05, |
|
"loss": 0.1561, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.18026137899954936, |
|
"grad_norm": 41.062801361083984, |
|
"learning_rate": 4.819494584837546e-05, |
|
"loss": 0.1563, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.19828751689950427, |
|
"grad_norm": 19.93891716003418, |
|
"learning_rate": 4.8014440433213e-05, |
|
"loss": 0.1881, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.21631365479945922, |
|
"grad_norm": 21.90877914428711, |
|
"learning_rate": 4.783393501805055e-05, |
|
"loss": 0.1262, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.23433979269941416, |
|
"grad_norm": 14.123740196228027, |
|
"learning_rate": 4.765342960288809e-05, |
|
"loss": 0.1535, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.25236593059936907, |
|
"grad_norm": 30.671157836914062, |
|
"learning_rate": 4.747292418772563e-05, |
|
"loss": 0.1458, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.270392068499324, |
|
"grad_norm": 7.560745716094971, |
|
"learning_rate": 4.7292418772563177e-05, |
|
"loss": 0.1797, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.28841820639927895, |
|
"grad_norm": 12.448927879333496, |
|
"learning_rate": 4.711191335740072e-05, |
|
"loss": 0.1898, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.3064443442992339, |
|
"grad_norm": 20.040008544921875, |
|
"learning_rate": 4.693140794223827e-05, |
|
"loss": 0.1698, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.32447048219918884, |
|
"grad_norm": 7.592446804046631, |
|
"learning_rate": 4.675090252707581e-05, |
|
"loss": 0.156, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3424966200991438, |
|
"grad_norm": 18.830904006958008, |
|
"learning_rate": 4.657039711191336e-05, |
|
"loss": 0.1536, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3605227579990987, |
|
"grad_norm": 3.684938430786133, |
|
"learning_rate": 4.63898916967509e-05, |
|
"loss": 0.1152, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.3785488958990536, |
|
"grad_norm": 13.293309211730957, |
|
"learning_rate": 4.620938628158845e-05, |
|
"loss": 0.2116, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.39657503379900855, |
|
"grad_norm": 14.630786895751953, |
|
"learning_rate": 4.602888086642599e-05, |
|
"loss": 0.1549, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.4146011716989635, |
|
"grad_norm": 20.37642478942871, |
|
"learning_rate": 4.584837545126354e-05, |
|
"loss": 0.1679, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.43262730959891843, |
|
"grad_norm": 13.183340072631836, |
|
"learning_rate": 4.566787003610109e-05, |
|
"loss": 0.1581, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.45065344749887337, |
|
"grad_norm": 21.777263641357422, |
|
"learning_rate": 4.548736462093863e-05, |
|
"loss": 0.1618, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.4686795853988283, |
|
"grad_norm": 20.683944702148438, |
|
"learning_rate": 4.530685920577618e-05, |
|
"loss": 0.1606, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.48670572329878325, |
|
"grad_norm": 30.506752014160156, |
|
"learning_rate": 4.5126353790613716e-05, |
|
"loss": 0.1831, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.5047318611987381, |
|
"grad_norm": 22.953815460205078, |
|
"learning_rate": 4.494584837545127e-05, |
|
"loss": 0.1565, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5227579990986931, |
|
"grad_norm": 6.177365303039551, |
|
"learning_rate": 4.4765342960288806e-05, |
|
"loss": 0.1287, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.540784136998648, |
|
"grad_norm": 8.373098373413086, |
|
"learning_rate": 4.458483754512636e-05, |
|
"loss": 0.1553, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.558810274898603, |
|
"grad_norm": 10.07934284210205, |
|
"learning_rate": 4.44043321299639e-05, |
|
"loss": 0.1868, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5768364127985579, |
|
"grad_norm": 18.686702728271484, |
|
"learning_rate": 4.422382671480145e-05, |
|
"loss": 0.1576, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5948625506985128, |
|
"grad_norm": 10.679413795471191, |
|
"learning_rate": 4.404332129963899e-05, |
|
"loss": 0.1409, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.6128886885984678, |
|
"grad_norm": 11.381563186645508, |
|
"learning_rate": 4.386281588447654e-05, |
|
"loss": 0.2043, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6309148264984227, |
|
"grad_norm": 40.68292236328125, |
|
"learning_rate": 4.368231046931408e-05, |
|
"loss": 0.1288, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6489409643983777, |
|
"grad_norm": 17.46027946472168, |
|
"learning_rate": 4.350180505415163e-05, |
|
"loss": 0.1537, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6669671022983326, |
|
"grad_norm": 13.656320571899414, |
|
"learning_rate": 4.332129963898917e-05, |
|
"loss": 0.1201, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6849932401982876, |
|
"grad_norm": 20.412939071655273, |
|
"learning_rate": 4.314079422382672e-05, |
|
"loss": 0.1321, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.7030193780982424, |
|
"grad_norm": 9.957433700561523, |
|
"learning_rate": 4.296028880866426e-05, |
|
"loss": 0.141, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7210455159981974, |
|
"grad_norm": 9.682558059692383, |
|
"learning_rate": 4.277978339350181e-05, |
|
"loss": 0.1607, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7390716538981523, |
|
"grad_norm": 9.733131408691406, |
|
"learning_rate": 4.259927797833935e-05, |
|
"loss": 0.1037, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7570977917981072, |
|
"grad_norm": 15.76230525970459, |
|
"learning_rate": 4.24187725631769e-05, |
|
"loss": 0.2101, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7751239296980622, |
|
"grad_norm": 22.166545867919922, |
|
"learning_rate": 4.223826714801444e-05, |
|
"loss": 0.1459, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7931500675980171, |
|
"grad_norm": 17.87278175354004, |
|
"learning_rate": 4.205776173285199e-05, |
|
"loss": 0.147, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8111762054979721, |
|
"grad_norm": 5.201169490814209, |
|
"learning_rate": 4.187725631768953e-05, |
|
"loss": 0.1247, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.829202343397927, |
|
"grad_norm": 8.68251895904541, |
|
"learning_rate": 4.169675090252708e-05, |
|
"loss": 0.1172, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.847228481297882, |
|
"grad_norm": 14.086528778076172, |
|
"learning_rate": 4.151624548736462e-05, |
|
"loss": 0.111, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8652546191978369, |
|
"grad_norm": 5.421584606170654, |
|
"learning_rate": 4.1335740072202167e-05, |
|
"loss": 0.1264, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.8832807570977917, |
|
"grad_norm": 7.700821399688721, |
|
"learning_rate": 4.115523465703972e-05, |
|
"loss": 0.1278, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.9013068949977467, |
|
"grad_norm": 9.820999145507812, |
|
"learning_rate": 4.0974729241877256e-05, |
|
"loss": 0.1294, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9193330328977016, |
|
"grad_norm": 15.016349792480469, |
|
"learning_rate": 4.079422382671481e-05, |
|
"loss": 0.1607, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9373591707976566, |
|
"grad_norm": 10.43041706085205, |
|
"learning_rate": 4.0613718411552346e-05, |
|
"loss": 0.1091, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9553853086976115, |
|
"grad_norm": 26.99540901184082, |
|
"learning_rate": 4.043321299638989e-05, |
|
"loss": 0.1442, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.9734114465975665, |
|
"grad_norm": 8.318906784057617, |
|
"learning_rate": 4.0252707581227436e-05, |
|
"loss": 0.1233, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.9914375844975214, |
|
"grad_norm": 8.85923957824707, |
|
"learning_rate": 4.007220216606498e-05, |
|
"loss": 0.1466, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.0090130689499774, |
|
"grad_norm": 4.69356107711792, |
|
"learning_rate": 3.989169675090253e-05, |
|
"loss": 0.1433, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.0270392068499323, |
|
"grad_norm": 17.66107177734375, |
|
"learning_rate": 3.971119133574007e-05, |
|
"loss": 0.0944, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.0450653447498874, |
|
"grad_norm": 4.721229553222656, |
|
"learning_rate": 3.953068592057762e-05, |
|
"loss": 0.1372, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.0630914826498423, |
|
"grad_norm": 6.2178955078125, |
|
"learning_rate": 3.935018050541516e-05, |
|
"loss": 0.1412, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.0811176205497972, |
|
"grad_norm": 6.32495641708374, |
|
"learning_rate": 3.916967509025271e-05, |
|
"loss": 0.0892, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.099143758449752, |
|
"grad_norm": 12.221674919128418, |
|
"learning_rate": 3.898916967509025e-05, |
|
"loss": 0.1333, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.117169896349707, |
|
"grad_norm": 1.7088432312011719, |
|
"learning_rate": 3.88086642599278e-05, |
|
"loss": 0.1077, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.135196034249662, |
|
"grad_norm": 17.136337280273438, |
|
"learning_rate": 3.862815884476535e-05, |
|
"loss": 0.1381, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.153222172149617, |
|
"grad_norm": 5.424814224243164, |
|
"learning_rate": 3.844765342960289e-05, |
|
"loss": 0.1228, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.1712483100495719, |
|
"grad_norm": 18.775466918945312, |
|
"learning_rate": 3.826714801444044e-05, |
|
"loss": 0.099, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.1892744479495267, |
|
"grad_norm": 8.731025695800781, |
|
"learning_rate": 3.8086642599277976e-05, |
|
"loss": 0.1324, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.2073005858494819, |
|
"grad_norm": 9.53122615814209, |
|
"learning_rate": 3.790613718411553e-05, |
|
"loss": 0.1085, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.2253267237494367, |
|
"grad_norm": 17.20035743713379, |
|
"learning_rate": 3.7725631768953066e-05, |
|
"loss": 0.0774, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.2433528616493916, |
|
"grad_norm": 18.292253494262695, |
|
"learning_rate": 3.754512635379062e-05, |
|
"loss": 0.0965, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.2487607030193781, |
|
"eval_f1": 0.7416879795396419, |
|
"eval_loss": 0.10558413714170456, |
|
"eval_precision": 0.6921241050119332, |
|
"eval_recall": 0.7988980716253443, |
|
"eval_runtime": 115.001, |
|
"eval_samples_per_second": 40.634, |
|
"eval_steps_per_second": 5.087, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.2613789995493465, |
|
"grad_norm": 6.922005653381348, |
|
"learning_rate": 3.7364620938628155e-05, |
|
"loss": 0.0906, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.2794051374493014, |
|
"grad_norm": 33.214107513427734, |
|
"learning_rate": 3.718411552346571e-05, |
|
"loss": 0.072, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.2974312753492563, |
|
"grad_norm": 9.25637149810791, |
|
"learning_rate": 3.700361010830325e-05, |
|
"loss": 0.1597, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.3154574132492114, |
|
"grad_norm": 10.336318016052246, |
|
"learning_rate": 3.68231046931408e-05, |
|
"loss": 0.1063, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.3334835511491663, |
|
"grad_norm": 11.632000923156738, |
|
"learning_rate": 3.664259927797834e-05, |
|
"loss": 0.1175, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.3515096890491212, |
|
"grad_norm": 12.909192085266113, |
|
"learning_rate": 3.646209386281589e-05, |
|
"loss": 0.1285, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.3695358269490763, |
|
"grad_norm": 7.545075416564941, |
|
"learning_rate": 3.628158844765343e-05, |
|
"loss": 0.0864, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.3875619648490312, |
|
"grad_norm": 25.248464584350586, |
|
"learning_rate": 3.610108303249098e-05, |
|
"loss": 0.0769, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.405588102748986, |
|
"grad_norm": 10.5347318649292, |
|
"learning_rate": 3.592057761732852e-05, |
|
"loss": 0.088, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.423614240648941, |
|
"grad_norm": 13.101734161376953, |
|
"learning_rate": 3.574007220216607e-05, |
|
"loss": 0.1084, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.4416403785488958, |
|
"grad_norm": 12.188843727111816, |
|
"learning_rate": 3.555956678700361e-05, |
|
"loss": 0.1101, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.4596665164488507, |
|
"grad_norm": 11.552268981933594, |
|
"learning_rate": 3.537906137184116e-05, |
|
"loss": 0.0785, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.4776926543488058, |
|
"grad_norm": 10.269721031188965, |
|
"learning_rate": 3.51985559566787e-05, |
|
"loss": 0.1161, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.4957187922487607, |
|
"grad_norm": 12.09333610534668, |
|
"learning_rate": 3.5018050541516247e-05, |
|
"loss": 0.1113, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.5137449301487156, |
|
"grad_norm": 5.4690141677856445, |
|
"learning_rate": 3.483754512635379e-05, |
|
"loss": 0.1046, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.5317710680486707, |
|
"grad_norm": 6.296072959899902, |
|
"learning_rate": 3.4657039711191336e-05, |
|
"loss": 0.1202, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.5497972059486256, |
|
"grad_norm": 8.854303359985352, |
|
"learning_rate": 3.447653429602888e-05, |
|
"loss": 0.0929, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.5678233438485805, |
|
"grad_norm": 23.987024307250977, |
|
"learning_rate": 3.4296028880866426e-05, |
|
"loss": 0.1326, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.5858494817485354, |
|
"grad_norm": 7.229393005371094, |
|
"learning_rate": 3.411552346570397e-05, |
|
"loss": 0.1169, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.6038756196484902, |
|
"grad_norm": 5.463610649108887, |
|
"learning_rate": 3.3935018050541516e-05, |
|
"loss": 0.0809, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.6219017575484451, |
|
"grad_norm": 5.9659624099731445, |
|
"learning_rate": 3.375451263537907e-05, |
|
"loss": 0.1265, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.6399278954484002, |
|
"grad_norm": 17.582469940185547, |
|
"learning_rate": 3.3574007220216606e-05, |
|
"loss": 0.0846, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.6579540333483551, |
|
"grad_norm": 13.633155822753906, |
|
"learning_rate": 3.339350180505416e-05, |
|
"loss": 0.0999, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.67598017124831, |
|
"grad_norm": 14.496593475341797, |
|
"learning_rate": 3.3212996389891696e-05, |
|
"loss": 0.0903, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.694006309148265, |
|
"grad_norm": 7.594356536865234, |
|
"learning_rate": 3.303249097472924e-05, |
|
"loss": 0.1021, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.71203244704822, |
|
"grad_norm": 20.628849029541016, |
|
"learning_rate": 3.2851985559566786e-05, |
|
"loss": 0.1131, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.7300585849481749, |
|
"grad_norm": 8.25106430053711, |
|
"learning_rate": 3.267148014440433e-05, |
|
"loss": 0.0872, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.7480847228481298, |
|
"grad_norm": 17.08070182800293, |
|
"learning_rate": 3.249097472924188e-05, |
|
"loss": 0.0663, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.7661108607480847, |
|
"grad_norm": 38.33162307739258, |
|
"learning_rate": 3.231046931407942e-05, |
|
"loss": 0.142, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.7841369986480395, |
|
"grad_norm": 17.89957618713379, |
|
"learning_rate": 3.212996389891697e-05, |
|
"loss": 0.0942, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.8021631365479944, |
|
"grad_norm": 9.691226959228516, |
|
"learning_rate": 3.194945848375451e-05, |
|
"loss": 0.0902, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.8201892744479495, |
|
"grad_norm": 9.764283180236816, |
|
"learning_rate": 3.176895306859206e-05, |
|
"loss": 0.1108, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.8382154123479044, |
|
"grad_norm": 13.829035758972168, |
|
"learning_rate": 3.15884476534296e-05, |
|
"loss": 0.0881, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.8562415502478595, |
|
"grad_norm": 8.372889518737793, |
|
"learning_rate": 3.140794223826715e-05, |
|
"loss": 0.1272, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.8742676881478144, |
|
"grad_norm": 12.576041221618652, |
|
"learning_rate": 3.12274368231047e-05, |
|
"loss": 0.1076, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.8922938260477693, |
|
"grad_norm": 12.995355606079102, |
|
"learning_rate": 3.104693140794224e-05, |
|
"loss": 0.0932, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.9103199639477242, |
|
"grad_norm": 6.626877784729004, |
|
"learning_rate": 3.086642599277979e-05, |
|
"loss": 0.105, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.928346101847679, |
|
"grad_norm": 11.209733009338379, |
|
"learning_rate": 3.0685920577617325e-05, |
|
"loss": 0.1331, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.946372239747634, |
|
"grad_norm": 7.832246780395508, |
|
"learning_rate": 3.0505415162454877e-05, |
|
"loss": 0.087, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.9643983776475888, |
|
"grad_norm": 9.489190101623535, |
|
"learning_rate": 3.032490974729242e-05, |
|
"loss": 0.0904, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.982424515547544, |
|
"grad_norm": 14.940125465393066, |
|
"learning_rate": 3.0144404332129967e-05, |
|
"loss": 0.0959, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 8.826193809509277, |
|
"learning_rate": 2.996389891696751e-05, |
|
"loss": 0.1276, |
|
"step": 1110 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 2770, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.858514256176742e+16, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |