{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.996845425867508,
  "eval_steps": 500,
  "global_step": 1108,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.018026137899954935,
      "grad_norm": 18.020910263061523,
      "learning_rate": 4.9548736462093865e-05,
      "loss": 0.6809,
      "step": 10
    },
    {
      "epoch": 0.03605227579990987,
      "grad_norm": 30.794422149658203,
      "learning_rate": 4.909747292418773e-05,
      "loss": 0.572,
      "step": 20
    },
    {
      "epoch": 0.054078413699864804,
      "grad_norm": 24.659021377563477,
      "learning_rate": 4.864620938628159e-05,
      "loss": 0.554,
      "step": 30
    },
    {
      "epoch": 0.07210455159981974,
      "grad_norm": 17.13719367980957,
      "learning_rate": 4.819494584837546e-05,
      "loss": 0.5261,
      "step": 40
    },
    {
      "epoch": 0.09013068949977468,
      "grad_norm": 50.614078521728516,
      "learning_rate": 4.7743682310469314e-05,
      "loss": 0.5696,
      "step": 50
    },
    {
      "epoch": 0.10815682739972961,
      "grad_norm": 16.31667137145996,
      "learning_rate": 4.7292418772563177e-05,
      "loss": 0.4876,
      "step": 60
    },
    {
      "epoch": 0.12618296529968454,
      "grad_norm": 41.160762786865234,
      "learning_rate": 4.684115523465704e-05,
      "loss": 0.4647,
      "step": 70
    },
    {
      "epoch": 0.14420910319963948,
      "grad_norm": 20.278989791870117,
      "learning_rate": 4.63898916967509e-05,
      "loss": 0.4525,
      "step": 80
    },
    {
      "epoch": 0.16223524109959442,
      "grad_norm": 21.479433059692383,
      "learning_rate": 4.5938628158844764e-05,
      "loss": 0.4511,
      "step": 90
    },
    {
      "epoch": 0.18026137899954936,
      "grad_norm": 20.85803985595703,
      "learning_rate": 4.548736462093863e-05,
      "loss": 0.4828,
      "step": 100
    },
    {
      "epoch": 0.19828751689950427,
      "grad_norm": 19.689502716064453,
      "learning_rate": 4.5036101083032495e-05,
      "loss": 0.4785,
      "step": 110
    },
    {
      "epoch": 0.21631365479945922,
      "grad_norm": 23.28278350830078,
      "learning_rate": 4.458483754512636e-05,
      "loss": 0.4593,
      "step": 120
    },
    {
      "epoch": 0.23433979269941416,
      "grad_norm": 55.461483001708984,
      "learning_rate": 4.413357400722022e-05,
      "loss": 0.4265,
      "step": 130
    },
    {
      "epoch": 0.25236593059936907,
      "grad_norm": 21.591472625732422,
      "learning_rate": 4.368231046931408e-05,
      "loss": 0.4611,
      "step": 140
    },
    {
      "epoch": 0.270392068499324,
      "grad_norm": 31.00871467590332,
      "learning_rate": 4.3231046931407945e-05,
      "loss": 0.4489,
      "step": 150
    },
    {
      "epoch": 0.28841820639927895,
      "grad_norm": 21.315185546875,
      "learning_rate": 4.277978339350181e-05,
      "loss": 0.4755,
      "step": 160
    },
    {
      "epoch": 0.3064443442992339,
      "grad_norm": 25.012041091918945,
      "learning_rate": 4.232851985559567e-05,
      "loss": 0.4703,
      "step": 170
    },
    {
      "epoch": 0.32447048219918884,
      "grad_norm": 18.25890350341797,
      "learning_rate": 4.187725631768953e-05,
      "loss": 0.4456,
      "step": 180
    },
    {
      "epoch": 0.3424966200991438,
      "grad_norm": 19.33783721923828,
      "learning_rate": 4.1425992779783394e-05,
      "loss": 0.4764,
      "step": 190
    },
    {
      "epoch": 0.3605227579990987,
      "grad_norm": 21.992473602294922,
      "learning_rate": 4.0974729241877256e-05,
      "loss": 0.4204,
      "step": 200
    },
    {
      "epoch": 0.3785488958990536,
      "grad_norm": 20.253664016723633,
      "learning_rate": 4.052346570397112e-05,
      "loss": 0.4471,
      "step": 210
    },
    {
      "epoch": 0.39657503379900855,
      "grad_norm": 17.41029167175293,
      "learning_rate": 4.007220216606498e-05,
      "loss": 0.4387,
      "step": 220
    },
    {
      "epoch": 0.4146011716989635,
      "grad_norm": 18.28990364074707,
      "learning_rate": 3.962093862815885e-05,
      "loss": 0.4283,
      "step": 230
    },
    {
      "epoch": 0.43262730959891843,
      "grad_norm": 48.463008880615234,
      "learning_rate": 3.916967509025271e-05,
      "loss": 0.4447,
      "step": 240
    },
    {
      "epoch": 0.45065344749887337,
      "grad_norm": 15.58568000793457,
      "learning_rate": 3.8718411552346575e-05,
      "loss": 0.4257,
      "step": 250
    },
    {
      "epoch": 0.4686795853988283,
      "grad_norm": 31.015850067138672,
      "learning_rate": 3.826714801444044e-05,
      "loss": 0.4463,
      "step": 260
    },
    {
      "epoch": 0.48670572329878325,
      "grad_norm": 18.325271606445312,
      "learning_rate": 3.78158844765343e-05,
      "loss": 0.4207,
      "step": 270
    },
    {
      "epoch": 0.5047318611987381,
      "grad_norm": 20.241615295410156,
      "learning_rate": 3.7364620938628155e-05,
      "loss": 0.4084,
      "step": 280
    },
    {
      "epoch": 0.5227579990986931,
      "grad_norm": 19.043285369873047,
      "learning_rate": 3.6913357400722025e-05,
      "loss": 0.4345,
      "step": 290
    },
    {
      "epoch": 0.540784136998648,
      "grad_norm": 19.568265914916992,
      "learning_rate": 3.646209386281589e-05,
      "loss": 0.4393,
      "step": 300
    },
    {
      "epoch": 0.558810274898603,
      "grad_norm": 26.981136322021484,
      "learning_rate": 3.601083032490975e-05,
      "loss": 0.4532,
      "step": 310
    },
    {
      "epoch": 0.5768364127985579,
      "grad_norm": 17.03878402709961,
      "learning_rate": 3.555956678700361e-05,
      "loss": 0.4208,
      "step": 320
    },
    {
      "epoch": 0.5948625506985128,
      "grad_norm": 16.008604049682617,
      "learning_rate": 3.5108303249097474e-05,
      "loss": 0.4396,
      "step": 330
    },
    {
      "epoch": 0.6128886885984678,
      "grad_norm": 20.344444274902344,
      "learning_rate": 3.4657039711191336e-05,
      "loss": 0.4318,
      "step": 340
    },
    {
      "epoch": 0.6309148264984227,
      "grad_norm": 18.641759872436523,
      "learning_rate": 3.42057761732852e-05,
      "loss": 0.4481,
      "step": 350
    },
    {
      "epoch": 0.6489409643983777,
      "grad_norm": 25.64444351196289,
      "learning_rate": 3.375451263537907e-05,
      "loss": 0.4053,
      "step": 360
    },
    {
      "epoch": 0.6669671022983326,
      "grad_norm": 23.601179122924805,
      "learning_rate": 3.330324909747293e-05,
      "loss": 0.4463,
      "step": 370
    },
    {
      "epoch": 0.6849932401982876,
      "grad_norm": 29.48953628540039,
      "learning_rate": 3.2851985559566786e-05,
      "loss": 0.4086,
      "step": 380
    },
    {
      "epoch": 0.7030193780982424,
      "grad_norm": 15.503927230834961,
      "learning_rate": 3.240072202166065e-05,
      "loss": 0.4402,
      "step": 390
    },
    {
      "epoch": 0.7210455159981974,
      "grad_norm": 29.575830459594727,
      "learning_rate": 3.194945848375451e-05,
      "loss": 0.43,
      "step": 400
    },
    {
      "epoch": 0.7390716538981523,
      "grad_norm": 26.154592514038086,
      "learning_rate": 3.149819494584837e-05,
      "loss": 0.432,
      "step": 410
    },
    {
      "epoch": 0.7570977917981072,
      "grad_norm": 25.37771987915039,
      "learning_rate": 3.104693140794224e-05,
      "loss": 0.4277,
      "step": 420
    },
    {
      "epoch": 0.7751239296980622,
      "grad_norm": 31.5076904296875,
      "learning_rate": 3.0595667870036104e-05,
      "loss": 0.4131,
      "step": 430
    },
    {
      "epoch": 0.7931500675980171,
      "grad_norm": 16.534198760986328,
      "learning_rate": 3.0144404332129967e-05,
      "loss": 0.433,
      "step": 440
    },
    {
      "epoch": 0.8111762054979721,
      "grad_norm": 19.064289093017578,
      "learning_rate": 2.969314079422383e-05,
      "loss": 0.3964,
      "step": 450
    },
    {
      "epoch": 0.829202343397927,
      "grad_norm": 18.9260196685791,
      "learning_rate": 2.924187725631769e-05,
      "loss": 0.396,
      "step": 460
    },
    {
      "epoch": 0.847228481297882,
      "grad_norm": 27.17948341369629,
      "learning_rate": 2.879061371841155e-05,
      "loss": 0.4079,
      "step": 470
    },
    {
      "epoch": 0.8652546191978369,
      "grad_norm": 21.348743438720703,
      "learning_rate": 2.8339350180505413e-05,
      "loss": 0.4545,
      "step": 480
    },
    {
      "epoch": 0.8832807570977917,
      "grad_norm": 15.41019344329834,
      "learning_rate": 2.7888086642599282e-05,
      "loss": 0.4024,
      "step": 490
    },
    {
      "epoch": 0.9013068949977467,
      "grad_norm": 26.81648826599121,
      "learning_rate": 2.7436823104693144e-05,
      "loss": 0.3888,
      "step": 500
    },
    {
      "epoch": 0.9193330328977016,
      "grad_norm": 27.93416976928711,
      "learning_rate": 2.6985559566787007e-05,
      "loss": 0.4086,
      "step": 510
    },
    {
      "epoch": 0.9373591707976566,
      "grad_norm": 14.353543281555176,
      "learning_rate": 2.6534296028880866e-05,
      "loss": 0.416,
      "step": 520
    },
    {
      "epoch": 0.9553853086976115,
      "grad_norm": 15.367998123168945,
      "learning_rate": 2.6083032490974728e-05,
      "loss": 0.4184,
      "step": 530
    },
    {
      "epoch": 0.9734114465975665,
      "grad_norm": 22.00202178955078,
      "learning_rate": 2.563176895306859e-05,
      "loss": 0.4294,
      "step": 540
    },
    {
      "epoch": 0.9914375844975214,
      "grad_norm": 15.376652717590332,
      "learning_rate": 2.518050541516246e-05,
      "loss": 0.4068,
      "step": 550
    },
    {
      "epoch": 1.0,
      "eval_f1": 0.8437814956661963,
      "eval_fn": 317,
      "eval_fp": 458,
      "eval_loss": 0.3667502701282501,
      "eval_precision": 0.8204625637005096,
      "eval_recall": 0.8684647302904565,
      "eval_runtime": 14.8642,
      "eval_samples_per_second": 314.379,
      "eval_steps_per_second": 39.356,
      "eval_tn": 1805,
      "eval_tp": 2093,
      "step": 555
    },
    {
      "epoch": 1.0090130689499774,
      "grad_norm": 33.98088455200195,
      "learning_rate": 2.472924187725632e-05,
      "loss": 0.3578,
      "step": 560
    },
    {
      "epoch": 1.0270392068499323,
      "grad_norm": 24.8944091796875,
      "learning_rate": 2.427797833935018e-05,
      "loss": 0.3203,
      "step": 570
    },
    {
      "epoch": 1.0450653447498874,
      "grad_norm": 17.574607849121094,
      "learning_rate": 2.3826714801444043e-05,
      "loss": 0.335,
      "step": 580
    },
    {
      "epoch": 1.0630914826498423,
      "grad_norm": 26.903356552124023,
      "learning_rate": 2.3375451263537906e-05,
      "loss": 0.289,
      "step": 590
    },
    {
      "epoch": 1.0811176205497972,
      "grad_norm": 31.987625122070312,
      "learning_rate": 2.292418772563177e-05,
      "loss": 0.3511,
      "step": 600
    },
    {
      "epoch": 1.099143758449752,
      "grad_norm": 29.54005241394043,
      "learning_rate": 2.2472924187725634e-05,
      "loss": 0.3126,
      "step": 610
    },
    {
      "epoch": 1.117169896349707,
      "grad_norm": 22.088634490966797,
      "learning_rate": 2.2021660649819496e-05,
      "loss": 0.3004,
      "step": 620
    },
    {
      "epoch": 1.135196034249662,
      "grad_norm": 30.805749893188477,
      "learning_rate": 2.157039711191336e-05,
      "loss": 0.3323,
      "step": 630
    },
    {
      "epoch": 1.153222172149617,
      "grad_norm": 16.607519149780273,
      "learning_rate": 2.111913357400722e-05,
      "loss": 0.3345,
      "step": 640
    },
    {
      "epoch": 1.1712483100495719,
      "grad_norm": 25.248517990112305,
      "learning_rate": 2.0667870036101083e-05,
      "loss": 0.3001,
      "step": 650
    },
    {
      "epoch": 1.1892744479495267,
      "grad_norm": 29.825464248657227,
      "learning_rate": 2.0216606498194946e-05,
      "loss": 0.3146,
      "step": 660
    },
    {
      "epoch": 1.2073005858494819,
      "grad_norm": 30.085296630859375,
      "learning_rate": 1.976534296028881e-05,
      "loss": 0.3335,
      "step": 670
    },
    {
      "epoch": 1.2253267237494367,
      "grad_norm": 23.349525451660156,
      "learning_rate": 1.9314079422382674e-05,
      "loss": 0.27,
      "step": 680
    },
    {
      "epoch": 1.2433528616493916,
      "grad_norm": 34.48153305053711,
      "learning_rate": 1.8862815884476533e-05,
      "loss": 0.3333,
      "step": 690
    },
    {
      "epoch": 1.2613789995493465,
      "grad_norm": 49.01852798461914,
      "learning_rate": 1.84115523465704e-05,
      "loss": 0.275,
      "step": 700
    },
    {
      "epoch": 1.2794051374493014,
      "grad_norm": 24.860200881958008,
      "learning_rate": 1.796028880866426e-05,
      "loss": 0.311,
      "step": 710
    },
    {
      "epoch": 1.2974312753492563,
      "grad_norm": 30.17133331298828,
      "learning_rate": 1.7509025270758123e-05,
      "loss": 0.2988,
      "step": 720
    },
    {
      "epoch": 1.3154574132492114,
      "grad_norm": 47.71062469482422,
      "learning_rate": 1.7057761732851986e-05,
      "loss": 0.2696,
      "step": 730
    },
    {
      "epoch": 1.3334835511491663,
      "grad_norm": 40.605262756347656,
      "learning_rate": 1.6606498194945848e-05,
      "loss": 0.2966,
      "step": 740
    },
    {
      "epoch": 1.3515096890491212,
      "grad_norm": 23.67529296875,
      "learning_rate": 1.615523465703971e-05,
      "loss": 0.2967,
      "step": 750
    },
    {
      "epoch": 1.3695358269490763,
      "grad_norm": 25.661808013916016,
      "learning_rate": 1.5703971119133576e-05,
      "loss": 0.2856,
      "step": 760
    },
    {
      "epoch": 1.3875619648490312,
      "grad_norm": 21.13004493713379,
      "learning_rate": 1.5252707581227438e-05,
      "loss": 0.3415,
      "step": 770
    },
    {
      "epoch": 1.405588102748986,
      "grad_norm": 24.510709762573242,
      "learning_rate": 1.4801444043321299e-05,
      "loss": 0.3044,
      "step": 780
    },
    {
      "epoch": 1.423614240648941,
      "grad_norm": 22.614412307739258,
      "learning_rate": 1.4350180505415165e-05,
      "loss": 0.3042,
      "step": 790
    },
    {
      "epoch": 1.4416403785488958,
      "grad_norm": 26.001209259033203,
      "learning_rate": 1.3898916967509026e-05,
      "loss": 0.2906,
      "step": 800
    },
    {
      "epoch": 1.4596665164488507,
      "grad_norm": 26.30026626586914,
      "learning_rate": 1.3447653429602888e-05,
      "loss": 0.2603,
      "step": 810
    },
    {
      "epoch": 1.4776926543488058,
      "grad_norm": 26.163490295410156,
      "learning_rate": 1.299638989169675e-05,
      "loss": 0.2768,
      "step": 820
    },
    {
      "epoch": 1.4957187922487607,
      "grad_norm": 48.625335693359375,
      "learning_rate": 1.2545126353790614e-05,
      "loss": 0.3203,
      "step": 830
    },
    {
      "epoch": 1.5137449301487156,
      "grad_norm": 19.079364776611328,
      "learning_rate": 1.2093862815884477e-05,
      "loss": 0.3069,
      "step": 840
    },
    {
      "epoch": 1.5317710680486707,
      "grad_norm": 21.295671463012695,
      "learning_rate": 1.164259927797834e-05,
      "loss": 0.2658,
      "step": 850
    },
    {
      "epoch": 1.5497972059486256,
      "grad_norm": 26.641357421875,
      "learning_rate": 1.1191335740072201e-05,
      "loss": 0.281,
      "step": 860
    },
    {
      "epoch": 1.5678233438485805,
      "grad_norm": 33.70462417602539,
      "learning_rate": 1.0740072202166066e-05,
      "loss": 0.2754,
      "step": 870
    },
    {
      "epoch": 1.5858494817485354,
      "grad_norm": 30.827627182006836,
      "learning_rate": 1.028880866425993e-05,
      "loss": 0.2913,
      "step": 880
    },
    {
      "epoch": 1.6038756196484902,
      "grad_norm": 19.456621170043945,
      "learning_rate": 9.83754512635379e-06,
      "loss": 0.3002,
      "step": 890
    },
    {
      "epoch": 1.6219017575484451,
      "grad_norm": 22.82554817199707,
      "learning_rate": 9.386281588447654e-06,
      "loss": 0.3197,
      "step": 900
    },
    {
      "epoch": 1.6399278954484002,
      "grad_norm": 22.25665283203125,
      "learning_rate": 8.935018050541517e-06,
      "loss": 0.3069,
      "step": 910
    },
    {
      "epoch": 1.6579540333483551,
      "grad_norm": 30.897809982299805,
      "learning_rate": 8.483754512635379e-06,
      "loss": 0.2965,
      "step": 920
    },
    {
      "epoch": 1.67598017124831,
      "grad_norm": 21.626707077026367,
      "learning_rate": 8.032490974729243e-06,
      "loss": 0.287,
      "step": 930
    },
    {
      "epoch": 1.694006309148265,
      "grad_norm": 32.338802337646484,
      "learning_rate": 7.581227436823105e-06,
      "loss": 0.2863,
      "step": 940
    },
    {
      "epoch": 1.71203244704822,
      "grad_norm": 21.633394241333008,
      "learning_rate": 7.129963898916968e-06,
      "loss": 0.2822,
      "step": 950
    },
    {
      "epoch": 1.7300585849481749,
      "grad_norm": 18.33571434020996,
      "learning_rate": 6.678700361010831e-06,
      "loss": 0.2676,
      "step": 960
    },
    {
      "epoch": 1.7480847228481298,
      "grad_norm": 29.97617530822754,
      "learning_rate": 6.227436823104693e-06,
      "loss": 0.2652,
      "step": 970
    },
    {
      "epoch": 1.7661108607480847,
      "grad_norm": 37.65591812133789,
      "learning_rate": 5.776173285198557e-06,
      "loss": 0.2956,
      "step": 980
    },
    {
      "epoch": 1.7841369986480395,
      "grad_norm": 33.256507873535156,
      "learning_rate": 5.324909747292419e-06,
      "loss": 0.3358,
      "step": 990
    },
    {
      "epoch": 1.8021631365479944,
      "grad_norm": 24.32988739013672,
      "learning_rate": 4.873646209386281e-06,
      "loss": 0.3075,
      "step": 1000
    },
    {
      "epoch": 1.8201892744479495,
      "grad_norm": 40.130863189697266,
      "learning_rate": 4.422382671480145e-06,
      "loss": 0.2682,
      "step": 1010
    },
    {
      "epoch": 1.8382154123479044,
      "grad_norm": 20.9071044921875,
      "learning_rate": 3.971119133574008e-06,
      "loss": 0.2849,
      "step": 1020
    },
    {
      "epoch": 1.8562415502478595,
      "grad_norm": 39.87733840942383,
      "learning_rate": 3.51985559566787e-06,
      "loss": 0.2967,
      "step": 1030
    },
    {
      "epoch": 1.8742676881478144,
      "grad_norm": 27.898658752441406,
      "learning_rate": 3.068592057761733e-06,
      "loss": 0.2846,
      "step": 1040
    },
    {
      "epoch": 1.8922938260477693,
      "grad_norm": 34.07933807373047,
      "learning_rate": 2.6173285198555957e-06,
      "loss": 0.2228,
      "step": 1050
    },
    {
      "epoch": 1.9103199639477242,
      "grad_norm": 20.768266677856445,
      "learning_rate": 2.1660649819494585e-06,
      "loss": 0.269,
      "step": 1060
    },
    {
      "epoch": 1.928346101847679,
      "grad_norm": 29.845983505249023,
      "learning_rate": 1.7148014440433215e-06,
      "loss": 0.2628,
      "step": 1070
    },
    {
      "epoch": 1.946372239747634,
      "grad_norm": 31.16872787475586,
      "learning_rate": 1.263537906137184e-06,
      "loss": 0.2821,
      "step": 1080
    },
    {
      "epoch": 1.9643983776475888,
      "grad_norm": 24.65738868713379,
      "learning_rate": 8.122743682310469e-07,
      "loss": 0.2961,
      "step": 1090
    },
    {
      "epoch": 1.982424515547544,
      "grad_norm": 33.531166076660156,
      "learning_rate": 3.610108303249098e-07,
      "loss": 0.2663,
      "step": 1100
    },
    {
      "epoch": 1.996845425867508,
      "eval_f1": 0.8471580638682467,
      "eval_fn": 301,
      "eval_fp": 460,
      "eval_loss": 0.38907328248023987,
      "eval_precision": 0.8209420007785131,
      "eval_recall": 0.875103734439834,
      "eval_runtime": 15.1098,
      "eval_samples_per_second": 309.27,
      "eval_steps_per_second": 38.717,
      "eval_tn": 1803,
      "eval_tp": 2109,
      "step": 1108
    }
  ],
  "logging_steps": 10,
  "max_steps": 1108,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9326760690401280.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}