{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.6690753378830456,
"eval_steps": 500,
"global_step": 5000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0026763013515321826,
"grad_norm": 13.1875,
"learning_rate": 9.973236986484679e-06,
"loss": 1.7384,
"step": 20
},
{
"epoch": 0.005352602703064365,
"grad_norm": 27.875,
"learning_rate": 9.946473972969357e-06,
"loss": 1.6485,
"step": 40
},
{
"epoch": 0.008028904054596548,
"grad_norm": 9.8125,
"learning_rate": 9.919710959454035e-06,
"loss": 1.4827,
"step": 60
},
{
"epoch": 0.01070520540612873,
"grad_norm": 6.4375,
"learning_rate": 9.892947945938713e-06,
"loss": 1.3008,
"step": 80
},
{
"epoch": 0.013381506757660913,
"grad_norm": 15.9375,
"learning_rate": 9.86618493242339e-06,
"loss": 1.24,
"step": 100
},
{
"epoch": 0.016057808109193095,
"grad_norm": 7.125,
"learning_rate": 9.83942191890807e-06,
"loss": 0.9532,
"step": 120
},
{
"epoch": 0.018734109460725276,
"grad_norm": 6.375,
"learning_rate": 9.812658905392748e-06,
"loss": 1.1199,
"step": 140
},
{
"epoch": 0.02141041081225746,
"grad_norm": 9.6875,
"learning_rate": 9.785895891877426e-06,
"loss": 1.2211,
"step": 160
},
{
"epoch": 0.02408671216378964,
"grad_norm": 8.8125,
"learning_rate": 9.759132878362104e-06,
"loss": 1.1512,
"step": 180
},
{
"epoch": 0.026763013515321826,
"grad_norm": 6.0625,
"learning_rate": 9.732369864846782e-06,
"loss": 1.2178,
"step": 200
},
{
"epoch": 0.029439314866854006,
"grad_norm": 12.0,
"learning_rate": 9.70560685133146e-06,
"loss": 0.9836,
"step": 220
},
{
"epoch": 0.03211561621838619,
"grad_norm": 19.5,
"learning_rate": 9.678843837816138e-06,
"loss": 1.1758,
"step": 240
},
{
"epoch": 0.034791917569918375,
"grad_norm": 11.6875,
"learning_rate": 9.652080824300816e-06,
"loss": 1.0029,
"step": 260
},
{
"epoch": 0.03746821892145055,
"grad_norm": 10.8125,
"learning_rate": 9.625317810785494e-06,
"loss": 0.917,
"step": 280
},
{
"epoch": 0.04014452027298274,
"grad_norm": 5.59375,
"learning_rate": 9.598554797270172e-06,
"loss": 0.8177,
"step": 300
},
{
"epoch": 0.04282082162451492,
"grad_norm": 7.75,
"learning_rate": 9.571791783754852e-06,
"loss": 0.8647,
"step": 320
},
{
"epoch": 0.045497122976047105,
"grad_norm": 13.0625,
"learning_rate": 9.54502877023953e-06,
"loss": 0.7716,
"step": 340
},
{
"epoch": 0.04817342432757928,
"grad_norm": 8.625,
"learning_rate": 9.518265756724208e-06,
"loss": 0.8501,
"step": 360
},
{
"epoch": 0.05084972567911147,
"grad_norm": 7.53125,
"learning_rate": 9.491502743208886e-06,
"loss": 0.772,
"step": 380
},
{
"epoch": 0.05352602703064365,
"grad_norm": 11.3125,
"learning_rate": 9.464739729693564e-06,
"loss": 0.7478,
"step": 400
},
{
"epoch": 0.056202328382175835,
"grad_norm": 13.6875,
"learning_rate": 9.437976716178242e-06,
"loss": 0.8187,
"step": 420
},
{
"epoch": 0.05887862973370801,
"grad_norm": 11.3125,
"learning_rate": 9.41121370266292e-06,
"loss": 0.729,
"step": 440
},
{
"epoch": 0.0615549310852402,
"grad_norm": 6.34375,
"learning_rate": 9.384450689147598e-06,
"loss": 0.7931,
"step": 460
},
{
"epoch": 0.06423123243677238,
"grad_norm": 23.0,
"learning_rate": 9.357687675632276e-06,
"loss": 0.7999,
"step": 480
},
{
"epoch": 0.06690753378830457,
"grad_norm": 11.125,
"learning_rate": 9.330924662116954e-06,
"loss": 0.7258,
"step": 500
},
{
"epoch": 0.06958383513983675,
"grad_norm": 33.75,
"learning_rate": 9.304161648601634e-06,
"loss": 0.8433,
"step": 520
},
{
"epoch": 0.07226013649136893,
"grad_norm": 9.125,
"learning_rate": 9.277398635086312e-06,
"loss": 0.7657,
"step": 540
},
{
"epoch": 0.0749364378429011,
"grad_norm": 12.875,
"learning_rate": 9.25063562157099e-06,
"loss": 0.668,
"step": 560
},
{
"epoch": 0.07761273919443329,
"grad_norm": 4.6875,
"learning_rate": 9.223872608055667e-06,
"loss": 0.7134,
"step": 580
},
{
"epoch": 0.08028904054596547,
"grad_norm": 13.125,
"learning_rate": 9.197109594540345e-06,
"loss": 0.6519,
"step": 600
},
{
"epoch": 0.08296534189749766,
"grad_norm": 8.5625,
"learning_rate": 9.170346581025023e-06,
"loss": 0.7203,
"step": 620
},
{
"epoch": 0.08564164324902984,
"grad_norm": 10.0,
"learning_rate": 9.143583567509703e-06,
"loss": 0.7479,
"step": 640
},
{
"epoch": 0.08831794460056203,
"grad_norm": 8.6875,
"learning_rate": 9.116820553994381e-06,
"loss": 0.7291,
"step": 660
},
{
"epoch": 0.09099424595209421,
"grad_norm": 9.125,
"learning_rate": 9.090057540479059e-06,
"loss": 0.7096,
"step": 680
},
{
"epoch": 0.0936705473036264,
"grad_norm": 11.6875,
"learning_rate": 9.063294526963737e-06,
"loss": 0.7773,
"step": 700
},
{
"epoch": 0.09634684865515857,
"grad_norm": 6.4375,
"learning_rate": 9.036531513448415e-06,
"loss": 0.7675,
"step": 720
},
{
"epoch": 0.09902315000669075,
"grad_norm": 24.5,
"learning_rate": 9.009768499933093e-06,
"loss": 0.7259,
"step": 740
},
{
"epoch": 0.10169945135822293,
"grad_norm": 7.84375,
"learning_rate": 8.983005486417771e-06,
"loss": 0.7389,
"step": 760
},
{
"epoch": 0.10437575270975512,
"grad_norm": 14.0,
"learning_rate": 8.95624247290245e-06,
"loss": 0.7706,
"step": 780
},
{
"epoch": 0.1070520540612873,
"grad_norm": 11.6875,
"learning_rate": 8.929479459387129e-06,
"loss": 0.7571,
"step": 800
},
{
"epoch": 0.10972835541281949,
"grad_norm": 18.125,
"learning_rate": 8.902716445871807e-06,
"loss": 0.7931,
"step": 820
},
{
"epoch": 0.11240465676435167,
"grad_norm": 19.0,
"learning_rate": 8.875953432356485e-06,
"loss": 0.6576,
"step": 840
},
{
"epoch": 0.11508095811588386,
"grad_norm": 21.0,
"learning_rate": 8.849190418841163e-06,
"loss": 0.8381,
"step": 860
},
{
"epoch": 0.11775725946741603,
"grad_norm": 16.375,
"learning_rate": 8.82242740532584e-06,
"loss": 0.6927,
"step": 880
},
{
"epoch": 0.12043356081894821,
"grad_norm": 7.0625,
"learning_rate": 8.795664391810519e-06,
"loss": 0.725,
"step": 900
},
{
"epoch": 0.1231098621704804,
"grad_norm": 16.375,
"learning_rate": 8.768901378295197e-06,
"loss": 0.6435,
"step": 920
},
{
"epoch": 0.12578616352201258,
"grad_norm": 4.8125,
"learning_rate": 8.742138364779875e-06,
"loss": 0.7177,
"step": 940
},
{
"epoch": 0.12846246487354476,
"grad_norm": 9.375,
"learning_rate": 8.715375351264553e-06,
"loss": 0.6612,
"step": 960
},
{
"epoch": 0.13113876622507695,
"grad_norm": 7.1875,
"learning_rate": 8.688612337749232e-06,
"loss": 0.6302,
"step": 980
},
{
"epoch": 0.13381506757660913,
"grad_norm": 5.5,
"learning_rate": 8.66184932423391e-06,
"loss": 0.6037,
"step": 1000
},
{
"epoch": 0.13649136892814132,
"grad_norm": 7.34375,
"learning_rate": 8.635086310718588e-06,
"loss": 0.6317,
"step": 1020
},
{
"epoch": 0.1391676702796735,
"grad_norm": 15.5625,
"learning_rate": 8.608323297203266e-06,
"loss": 0.6688,
"step": 1040
},
{
"epoch": 0.14184397163120568,
"grad_norm": 4.53125,
"learning_rate": 8.581560283687944e-06,
"loss": 0.6914,
"step": 1060
},
{
"epoch": 0.14452027298273787,
"grad_norm": 9.25,
"learning_rate": 8.554797270172622e-06,
"loss": 0.6758,
"step": 1080
},
{
"epoch": 0.14719657433427003,
"grad_norm": 12.4375,
"learning_rate": 8.5280342566573e-06,
"loss": 0.7167,
"step": 1100
},
{
"epoch": 0.1498728756858022,
"grad_norm": 11.6875,
"learning_rate": 8.501271243141978e-06,
"loss": 0.7415,
"step": 1120
},
{
"epoch": 0.1525491770373344,
"grad_norm": 18.5,
"learning_rate": 8.474508229626656e-06,
"loss": 0.7248,
"step": 1140
},
{
"epoch": 0.15522547838886658,
"grad_norm": 9.25,
"learning_rate": 8.447745216111334e-06,
"loss": 0.7272,
"step": 1160
},
{
"epoch": 0.15790177974039876,
"grad_norm": 15.0625,
"learning_rate": 8.420982202596014e-06,
"loss": 0.713,
"step": 1180
},
{
"epoch": 0.16057808109193095,
"grad_norm": 22.375,
"learning_rate": 8.394219189080692e-06,
"loss": 0.7674,
"step": 1200
},
{
"epoch": 0.16325438244346313,
"grad_norm": 11.0625,
"learning_rate": 8.36745617556537e-06,
"loss": 0.6709,
"step": 1220
},
{
"epoch": 0.16593068379499532,
"grad_norm": 6.75,
"learning_rate": 8.340693162050048e-06,
"loss": 0.6167,
"step": 1240
},
{
"epoch": 0.1686069851465275,
"grad_norm": 15.0625,
"learning_rate": 8.313930148534726e-06,
"loss": 0.6662,
"step": 1260
},
{
"epoch": 0.17128328649805968,
"grad_norm": 7.9375,
"learning_rate": 8.287167135019404e-06,
"loss": 0.6959,
"step": 1280
},
{
"epoch": 0.17395958784959187,
"grad_norm": 7.625,
"learning_rate": 8.260404121504082e-06,
"loss": 0.7078,
"step": 1300
},
{
"epoch": 0.17663588920112405,
"grad_norm": 4.78125,
"learning_rate": 8.23364110798876e-06,
"loss": 0.7281,
"step": 1320
},
{
"epoch": 0.17931219055265624,
"grad_norm": 38.75,
"learning_rate": 8.206878094473438e-06,
"loss": 0.707,
"step": 1340
},
{
"epoch": 0.18198849190418842,
"grad_norm": 10.6875,
"learning_rate": 8.180115080958116e-06,
"loss": 0.7312,
"step": 1360
},
{
"epoch": 0.1846647932557206,
"grad_norm": 11.25,
"learning_rate": 8.153352067442795e-06,
"loss": 0.7054,
"step": 1380
},
{
"epoch": 0.1873410946072528,
"grad_norm": 14.25,
"learning_rate": 8.126589053927473e-06,
"loss": 0.6535,
"step": 1400
},
{
"epoch": 0.19001739595878495,
"grad_norm": 9.0,
"learning_rate": 8.099826040412151e-06,
"loss": 0.6309,
"step": 1420
},
{
"epoch": 0.19269369731031713,
"grad_norm": 24.875,
"learning_rate": 8.07306302689683e-06,
"loss": 0.6625,
"step": 1440
},
{
"epoch": 0.19536999866184931,
"grad_norm": 9.9375,
"learning_rate": 8.046300013381507e-06,
"loss": 0.702,
"step": 1460
},
{
"epoch": 0.1980463000133815,
"grad_norm": 6.46875,
"learning_rate": 8.019536999866185e-06,
"loss": 0.7289,
"step": 1480
},
{
"epoch": 0.20072260136491368,
"grad_norm": 7.3125,
"learning_rate": 7.992773986350863e-06,
"loss": 0.6527,
"step": 1500
},
{
"epoch": 0.20339890271644587,
"grad_norm": 7.46875,
"learning_rate": 7.966010972835541e-06,
"loss": 0.6631,
"step": 1520
},
{
"epoch": 0.20607520406797805,
"grad_norm": 12.5,
"learning_rate": 7.93924795932022e-06,
"loss": 0.5758,
"step": 1540
},
{
"epoch": 0.20875150541951024,
"grad_norm": 4.875,
"learning_rate": 7.912484945804897e-06,
"loss": 0.6604,
"step": 1560
},
{
"epoch": 0.21142780677104242,
"grad_norm": 9.125,
"learning_rate": 7.885721932289577e-06,
"loss": 0.6006,
"step": 1580
},
{
"epoch": 0.2141041081225746,
"grad_norm": 11.0625,
"learning_rate": 7.858958918774255e-06,
"loss": 0.5989,
"step": 1600
},
{
"epoch": 0.2167804094741068,
"grad_norm": 11.75,
"learning_rate": 7.832195905258933e-06,
"loss": 0.709,
"step": 1620
},
{
"epoch": 0.21945671082563897,
"grad_norm": 16.375,
"learning_rate": 7.805432891743611e-06,
"loss": 0.6903,
"step": 1640
},
{
"epoch": 0.22213301217717116,
"grad_norm": 9.4375,
"learning_rate": 7.778669878228289e-06,
"loss": 0.6708,
"step": 1660
},
{
"epoch": 0.22480931352870334,
"grad_norm": 9.6875,
"learning_rate": 7.751906864712967e-06,
"loss": 0.7066,
"step": 1680
},
{
"epoch": 0.22748561488023553,
"grad_norm": 5.71875,
"learning_rate": 7.725143851197645e-06,
"loss": 0.6876,
"step": 1700
},
{
"epoch": 0.2301619162317677,
"grad_norm": 10.0625,
"learning_rate": 7.698380837682323e-06,
"loss": 0.6687,
"step": 1720
},
{
"epoch": 0.23283821758329987,
"grad_norm": 22.875,
"learning_rate": 7.671617824167e-06,
"loss": 0.6319,
"step": 1740
},
{
"epoch": 0.23551451893483205,
"grad_norm": 10.9375,
"learning_rate": 7.644854810651679e-06,
"loss": 0.6421,
"step": 1760
},
{
"epoch": 0.23819082028636424,
"grad_norm": 42.5,
"learning_rate": 7.618091797136358e-06,
"loss": 0.607,
"step": 1780
},
{
"epoch": 0.24086712163789642,
"grad_norm": 11.8125,
"learning_rate": 7.591328783621036e-06,
"loss": 0.6368,
"step": 1800
},
{
"epoch": 0.2435434229894286,
"grad_norm": 8.0625,
"learning_rate": 7.564565770105714e-06,
"loss": 0.693,
"step": 1820
},
{
"epoch": 0.2462197243409608,
"grad_norm": 21.75,
"learning_rate": 7.5378027565903925e-06,
"loss": 0.735,
"step": 1840
},
{
"epoch": 0.24889602569249297,
"grad_norm": 25.5,
"learning_rate": 7.5110397430750704e-06,
"loss": 0.6872,
"step": 1860
},
{
"epoch": 0.25157232704402516,
"grad_norm": 5.5,
"learning_rate": 7.484276729559748e-06,
"loss": 0.7235,
"step": 1880
},
{
"epoch": 0.25424862839555734,
"grad_norm": 5.875,
"learning_rate": 7.457513716044428e-06,
"loss": 0.6518,
"step": 1900
},
{
"epoch": 0.2569249297470895,
"grad_norm": 7.03125,
"learning_rate": 7.430750702529106e-06,
"loss": 0.6251,
"step": 1920
},
{
"epoch": 0.2596012310986217,
"grad_norm": 5.59375,
"learning_rate": 7.403987689013784e-06,
"loss": 0.6323,
"step": 1940
},
{
"epoch": 0.2622775324501539,
"grad_norm": 5.65625,
"learning_rate": 7.377224675498462e-06,
"loss": 0.7128,
"step": 1960
},
{
"epoch": 0.2649538338016861,
"grad_norm": 5.53125,
"learning_rate": 7.35046166198314e-06,
"loss": 0.6354,
"step": 1980
},
{
"epoch": 0.26763013515321826,
"grad_norm": 6.59375,
"learning_rate": 7.323698648467819e-06,
"loss": 0.6783,
"step": 2000
},
{
"epoch": 0.27030643650475045,
"grad_norm": 9.5,
"learning_rate": 7.296935634952497e-06,
"loss": 0.6546,
"step": 2020
},
{
"epoch": 0.27298273785628263,
"grad_norm": 14.6875,
"learning_rate": 7.270172621437175e-06,
"loss": 0.7473,
"step": 2040
},
{
"epoch": 0.2756590392078148,
"grad_norm": 4.25,
"learning_rate": 7.243409607921853e-06,
"loss": 0.5906,
"step": 2060
},
{
"epoch": 0.278335340559347,
"grad_norm": 7.53125,
"learning_rate": 7.216646594406531e-06,
"loss": 0.6394,
"step": 2080
},
{
"epoch": 0.2810116419108792,
"grad_norm": 10.25,
"learning_rate": 7.18988358089121e-06,
"loss": 0.6928,
"step": 2100
},
{
"epoch": 0.28368794326241137,
"grad_norm": 14.0625,
"learning_rate": 7.163120567375888e-06,
"loss": 0.7175,
"step": 2120
},
{
"epoch": 0.28636424461394355,
"grad_norm": 5.9375,
"learning_rate": 7.136357553860566e-06,
"loss": 0.6362,
"step": 2140
},
{
"epoch": 0.28904054596547574,
"grad_norm": 17.125,
"learning_rate": 7.109594540345244e-06,
"loss": 0.6462,
"step": 2160
},
{
"epoch": 0.2917168473170079,
"grad_norm": 6.3125,
"learning_rate": 7.082831526829922e-06,
"loss": 0.6098,
"step": 2180
},
{
"epoch": 0.29439314866854005,
"grad_norm": 13.8125,
"learning_rate": 7.0560685133146004e-06,
"loss": 0.6703,
"step": 2200
},
{
"epoch": 0.29706945002007223,
"grad_norm": 10.1875,
"learning_rate": 7.029305499799278e-06,
"loss": 0.6079,
"step": 2220
},
{
"epoch": 0.2997457513716044,
"grad_norm": 25.875,
"learning_rate": 7.002542486283956e-06,
"loss": 0.6221,
"step": 2240
},
{
"epoch": 0.3024220527231366,
"grad_norm": 13.5625,
"learning_rate": 6.975779472768634e-06,
"loss": 0.7642,
"step": 2260
},
{
"epoch": 0.3050983540746688,
"grad_norm": 21.0,
"learning_rate": 6.949016459253312e-06,
"loss": 0.5935,
"step": 2280
},
{
"epoch": 0.30777465542620097,
"grad_norm": 9.625,
"learning_rate": 6.922253445737991e-06,
"loss": 0.658,
"step": 2300
},
{
"epoch": 0.31045095677773316,
"grad_norm": 9.6875,
"learning_rate": 6.895490432222669e-06,
"loss": 0.653,
"step": 2320
},
{
"epoch": 0.31312725812926534,
"grad_norm": 18.125,
"learning_rate": 6.868727418707347e-06,
"loss": 0.6094,
"step": 2340
},
{
"epoch": 0.3158035594807975,
"grad_norm": 10.625,
"learning_rate": 6.841964405192025e-06,
"loss": 0.6108,
"step": 2360
},
{
"epoch": 0.3184798608323297,
"grad_norm": 8.25,
"learning_rate": 6.815201391676703e-06,
"loss": 0.6254,
"step": 2380
},
{
"epoch": 0.3211561621838619,
"grad_norm": 7.0625,
"learning_rate": 6.788438378161382e-06,
"loss": 0.5189,
"step": 2400
},
{
"epoch": 0.3238324635353941,
"grad_norm": 12.3125,
"learning_rate": 6.76167536464606e-06,
"loss": 0.6326,
"step": 2420
},
{
"epoch": 0.32650876488692626,
"grad_norm": 8.4375,
"learning_rate": 6.734912351130738e-06,
"loss": 0.541,
"step": 2440
},
{
"epoch": 0.32918506623845845,
"grad_norm": 8.125,
"learning_rate": 6.708149337615416e-06,
"loss": 0.627,
"step": 2460
},
{
"epoch": 0.33186136758999063,
"grad_norm": 9.6875,
"learning_rate": 6.681386324100094e-06,
"loss": 0.5483,
"step": 2480
},
{
"epoch": 0.3345376689415228,
"grad_norm": 4.8125,
"learning_rate": 6.654623310584773e-06,
"loss": 0.6937,
"step": 2500
},
{
"epoch": 0.337213970293055,
"grad_norm": 7.40625,
"learning_rate": 6.627860297069451e-06,
"loss": 0.7344,
"step": 2520
},
{
"epoch": 0.3398902716445872,
"grad_norm": 6.3125,
"learning_rate": 6.601097283554129e-06,
"loss": 0.5555,
"step": 2540
},
{
"epoch": 0.34256657299611937,
"grad_norm": 8.4375,
"learning_rate": 6.574334270038807e-06,
"loss": 0.6078,
"step": 2560
},
{
"epoch": 0.34524287434765155,
"grad_norm": 35.25,
"learning_rate": 6.547571256523485e-06,
"loss": 0.5849,
"step": 2580
},
{
"epoch": 0.34791917569918374,
"grad_norm": 5.3125,
"learning_rate": 6.5208082430081635e-06,
"loss": 0.5821,
"step": 2600
},
{
"epoch": 0.3505954770507159,
"grad_norm": 11.25,
"learning_rate": 6.4940452294928415e-06,
"loss": 0.6821,
"step": 2620
},
{
"epoch": 0.3532717784022481,
"grad_norm": 4.40625,
"learning_rate": 6.4672822159775195e-06,
"loss": 0.608,
"step": 2640
},
{
"epoch": 0.3559480797537803,
"grad_norm": 9.75,
"learning_rate": 6.4405192024621975e-06,
"loss": 0.6809,
"step": 2660
},
{
"epoch": 0.3586243811053125,
"grad_norm": 4.9375,
"learning_rate": 6.4137561889468755e-06,
"loss": 0.5272,
"step": 2680
},
{
"epoch": 0.36130068245684466,
"grad_norm": 14.25,
"learning_rate": 6.386993175431554e-06,
"loss": 0.5292,
"step": 2700
},
{
"epoch": 0.36397698380837684,
"grad_norm": 12.125,
"learning_rate": 6.360230161916232e-06,
"loss": 0.6039,
"step": 2720
},
{
"epoch": 0.366653285159909,
"grad_norm": 5.71875,
"learning_rate": 6.33346714840091e-06,
"loss": 0.681,
"step": 2740
},
{
"epoch": 0.3693295865114412,
"grad_norm": 24.5,
"learning_rate": 6.306704134885588e-06,
"loss": 0.6367,
"step": 2760
},
{
"epoch": 0.3720058878629734,
"grad_norm": 5.84375,
"learning_rate": 6.279941121370266e-06,
"loss": 0.6731,
"step": 2780
},
{
"epoch": 0.3746821892145056,
"grad_norm": 16.125,
"learning_rate": 6.253178107854945e-06,
"loss": 0.5908,
"step": 2800
},
{
"epoch": 0.37735849056603776,
"grad_norm": 11.0,
"learning_rate": 6.226415094339623e-06,
"loss": 0.6916,
"step": 2820
},
{
"epoch": 0.3800347919175699,
"grad_norm": 8.75,
"learning_rate": 6.199652080824301e-06,
"loss": 0.5824,
"step": 2840
},
{
"epoch": 0.3827110932691021,
"grad_norm": 6.59375,
"learning_rate": 6.172889067308979e-06,
"loss": 0.5781,
"step": 2860
},
{
"epoch": 0.38538739462063426,
"grad_norm": 12.75,
"learning_rate": 6.146126053793657e-06,
"loss": 0.5776,
"step": 2880
},
{
"epoch": 0.38806369597216644,
"grad_norm": 16.5,
"learning_rate": 6.119363040278336e-06,
"loss": 0.6522,
"step": 2900
},
{
"epoch": 0.39073999732369863,
"grad_norm": 6.90625,
"learning_rate": 6.092600026763014e-06,
"loss": 0.5461,
"step": 2920
},
{
"epoch": 0.3934162986752308,
"grad_norm": 4.4375,
"learning_rate": 6.065837013247692e-06,
"loss": 0.6251,
"step": 2940
},
{
"epoch": 0.396092600026763,
"grad_norm": 27.0,
"learning_rate": 6.03907399973237e-06,
"loss": 0.5881,
"step": 2960
},
{
"epoch": 0.3987689013782952,
"grad_norm": 23.625,
"learning_rate": 6.012310986217048e-06,
"loss": 0.5181,
"step": 2980
},
{
"epoch": 0.40144520272982737,
"grad_norm": 7.375,
"learning_rate": 5.985547972701727e-06,
"loss": 0.666,
"step": 3000
},
{
"epoch": 0.40412150408135955,
"grad_norm": 16.125,
"learning_rate": 5.958784959186405e-06,
"loss": 0.6627,
"step": 3020
},
{
"epoch": 0.40679780543289173,
"grad_norm": 7.09375,
"learning_rate": 5.932021945671083e-06,
"loss": 0.6242,
"step": 3040
},
{
"epoch": 0.4094741067844239,
"grad_norm": 11.5,
"learning_rate": 5.905258932155761e-06,
"loss": 0.6298,
"step": 3060
},
{
"epoch": 0.4121504081359561,
"grad_norm": 8.0625,
"learning_rate": 5.878495918640439e-06,
"loss": 0.5965,
"step": 3080
},
{
"epoch": 0.4148267094874883,
"grad_norm": 11.1875,
"learning_rate": 5.851732905125117e-06,
"loss": 0.6369,
"step": 3100
},
{
"epoch": 0.41750301083902047,
"grad_norm": 8.5,
"learning_rate": 5.824969891609795e-06,
"loss": 0.6442,
"step": 3120
},
{
"epoch": 0.42017931219055266,
"grad_norm": 33.0,
"learning_rate": 5.798206878094474e-06,
"loss": 0.6203,
"step": 3140
},
{
"epoch": 0.42285561354208484,
"grad_norm": 5.0625,
"learning_rate": 5.771443864579152e-06,
"loss": 0.6406,
"step": 3160
},
{
"epoch": 0.425531914893617,
"grad_norm": 10.4375,
"learning_rate": 5.744680851063831e-06,
"loss": 0.6375,
"step": 3180
},
{
"epoch": 0.4282082162451492,
"grad_norm": 5.625,
"learning_rate": 5.717917837548509e-06,
"loss": 0.6048,
"step": 3200
},
{
"epoch": 0.4308845175966814,
"grad_norm": 7.75,
"learning_rate": 5.691154824033187e-06,
"loss": 0.6119,
"step": 3220
},
{
"epoch": 0.4335608189482136,
"grad_norm": 6.75,
"learning_rate": 5.664391810517865e-06,
"loss": 0.6523,
"step": 3240
},
{
"epoch": 0.43623712029974576,
"grad_norm": 7.09375,
"learning_rate": 5.637628797002543e-06,
"loss": 0.6033,
"step": 3260
},
{
"epoch": 0.43891342165127795,
"grad_norm": 9.1875,
"learning_rate": 5.610865783487222e-06,
"loss": 0.5765,
"step": 3280
},
{
"epoch": 0.44158972300281013,
"grad_norm": 4.90625,
"learning_rate": 5.5841027699719e-06,
"loss": 0.5753,
"step": 3300
},
{
"epoch": 0.4442660243543423,
"grad_norm": 5.5625,
"learning_rate": 5.557339756456578e-06,
"loss": 0.6599,
"step": 3320
},
{
"epoch": 0.4469423257058745,
"grad_norm": 9.9375,
"learning_rate": 5.530576742941256e-06,
"loss": 0.5884,
"step": 3340
},
{
"epoch": 0.4496186270574067,
"grad_norm": 5.65625,
"learning_rate": 5.503813729425934e-06,
"loss": 0.5796,
"step": 3360
},
{
"epoch": 0.45229492840893887,
"grad_norm": 11.0625,
"learning_rate": 5.477050715910613e-06,
"loss": 0.664,
"step": 3380
},
{
"epoch": 0.45497122976047105,
"grad_norm": 7.3125,
"learning_rate": 5.450287702395291e-06,
"loss": 0.5953,
"step": 3400
},
{
"epoch": 0.45764753111200324,
"grad_norm": 8.6875,
"learning_rate": 5.4235246888799686e-06,
"loss": 0.6893,
"step": 3420
},
{
"epoch": 0.4603238324635354,
"grad_norm": 13.375,
"learning_rate": 5.3967616753646466e-06,
"loss": 0.6659,
"step": 3440
},
{
"epoch": 0.46300013381506755,
"grad_norm": 8.625,
"learning_rate": 5.3699986618493245e-06,
"loss": 0.578,
"step": 3460
},
{
"epoch": 0.46567643516659973,
"grad_norm": 7.40625,
"learning_rate": 5.343235648334003e-06,
"loss": 0.651,
"step": 3480
},
{
"epoch": 0.4683527365181319,
"grad_norm": 30.875,
"learning_rate": 5.316472634818681e-06,
"loss": 0.6255,
"step": 3500
},
{
"epoch": 0.4710290378696641,
"grad_norm": 4.9375,
"learning_rate": 5.289709621303359e-06,
"loss": 0.514,
"step": 3520
},
{
"epoch": 0.4737053392211963,
"grad_norm": 7.84375,
"learning_rate": 5.262946607788037e-06,
"loss": 0.6143,
"step": 3540
},
{
"epoch": 0.47638164057272847,
"grad_norm": 7.875,
"learning_rate": 5.236183594272715e-06,
"loss": 0.7061,
"step": 3560
},
{
"epoch": 0.47905794192426066,
"grad_norm": 14.9375,
"learning_rate": 5.209420580757394e-06,
"loss": 0.5776,
"step": 3580
},
{
"epoch": 0.48173424327579284,
"grad_norm": 6.71875,
"learning_rate": 5.182657567242072e-06,
"loss": 0.6318,
"step": 3600
},
{
"epoch": 0.484410544627325,
"grad_norm": 18.5,
"learning_rate": 5.15589455372675e-06,
"loss": 0.6403,
"step": 3620
},
{
"epoch": 0.4870868459788572,
"grad_norm": 9.625,
"learning_rate": 5.129131540211428e-06,
"loss": 0.6161,
"step": 3640
},
{
"epoch": 0.4897631473303894,
"grad_norm": 6.09375,
"learning_rate": 5.102368526696106e-06,
"loss": 0.6192,
"step": 3660
},
{
"epoch": 0.4924394486819216,
"grad_norm": 5.0,
"learning_rate": 5.075605513180785e-06,
"loss": 0.61,
"step": 3680
},
{
"epoch": 0.49511575003345376,
"grad_norm": 5.9375,
"learning_rate": 5.048842499665463e-06,
"loss": 0.6077,
"step": 3700
},
{
"epoch": 0.49779205138498595,
"grad_norm": 48.25,
"learning_rate": 5.022079486150141e-06,
"loss": 0.6666,
"step": 3720
},
{
"epoch": 0.5004683527365181,
"grad_norm": 5.71875,
"learning_rate": 4.995316472634819e-06,
"loss": 0.6599,
"step": 3740
},
{
"epoch": 0.5031446540880503,
"grad_norm": 5.75,
"learning_rate": 4.968553459119497e-06,
"loss": 0.5544,
"step": 3760
},
{
"epoch": 0.5058209554395825,
"grad_norm": 6.21875,
"learning_rate": 4.941790445604176e-06,
"loss": 0.6117,
"step": 3780
},
{
"epoch": 0.5084972567911147,
"grad_norm": 10.5625,
"learning_rate": 4.915027432088854e-06,
"loss": 0.6413,
"step": 3800
},
{
"epoch": 0.5111735581426469,
"grad_norm": 6.65625,
"learning_rate": 4.888264418573532e-06,
"loss": 0.5872,
"step": 3820
},
{
"epoch": 0.513849859494179,
"grad_norm": 10.375,
"learning_rate": 4.86150140505821e-06,
"loss": 0.5602,
"step": 3840
},
{
"epoch": 0.5165261608457112,
"grad_norm": 6.03125,
"learning_rate": 4.834738391542888e-06,
"loss": 0.6806,
"step": 3860
},
{
"epoch": 0.5192024621972434,
"grad_norm": 7.0,
"learning_rate": 4.8079753780275665e-06,
"loss": 0.632,
"step": 3880
},
{
"epoch": 0.5218787635487756,
"grad_norm": 6.03125,
"learning_rate": 4.7812123645122445e-06,
"loss": 0.7411,
"step": 3900
},
{
"epoch": 0.5245550649003078,
"grad_norm": 4.375,
"learning_rate": 4.7544493509969225e-06,
"loss": 0.6028,
"step": 3920
},
{
"epoch": 0.52723136625184,
"grad_norm": 9.6875,
"learning_rate": 4.7276863374816004e-06,
"loss": 0.654,
"step": 3940
},
{
"epoch": 0.5299076676033722,
"grad_norm": 9.25,
"learning_rate": 4.7009233239662784e-06,
"loss": 0.6127,
"step": 3960
},
{
"epoch": 0.5325839689549043,
"grad_norm": 5.875,
"learning_rate": 4.674160310450957e-06,
"loss": 0.5728,
"step": 3980
},
{
"epoch": 0.5352602703064365,
"grad_norm": 19.375,
"learning_rate": 4.647397296935635e-06,
"loss": 0.5786,
"step": 4000
},
{
"epoch": 0.5379365716579687,
"grad_norm": 16.75,
"learning_rate": 4.620634283420313e-06,
"loss": 0.6569,
"step": 4020
},
{
"epoch": 0.5406128730095009,
"grad_norm": 4.46875,
"learning_rate": 4.593871269904991e-06,
"loss": 0.6774,
"step": 4040
},
{
"epoch": 0.5432891743610331,
"grad_norm": 41.75,
"learning_rate": 4.567108256389669e-06,
"loss": 0.603,
"step": 4060
},
{
"epoch": 0.5459654757125653,
"grad_norm": 5.15625,
"learning_rate": 4.540345242874348e-06,
"loss": 0.5679,
"step": 4080
},
{
"epoch": 0.5486417770640974,
"grad_norm": 18.125,
"learning_rate": 4.513582229359027e-06,
"loss": 0.5686,
"step": 4100
},
{
"epoch": 0.5513180784156296,
"grad_norm": 6.90625,
"learning_rate": 4.486819215843705e-06,
"loss": 0.5776,
"step": 4120
},
{
"epoch": 0.5539943797671618,
"grad_norm": 6.15625,
"learning_rate": 4.460056202328383e-06,
"loss": 0.6051,
"step": 4140
},
{
"epoch": 0.556670681118694,
"grad_norm": 8.25,
"learning_rate": 4.433293188813061e-06,
"loss": 0.7062,
"step": 4160
},
{
"epoch": 0.5593469824702262,
"grad_norm": 8.6875,
"learning_rate": 4.406530175297739e-06,
"loss": 0.6409,
"step": 4180
},
{
"epoch": 0.5620232838217584,
"grad_norm": 7.59375,
"learning_rate": 4.379767161782418e-06,
"loss": 0.5696,
"step": 4200
},
{
"epoch": 0.5646995851732906,
"grad_norm": 8.0625,
"learning_rate": 4.353004148267096e-06,
"loss": 0.6228,
"step": 4220
},
{
"epoch": 0.5673758865248227,
"grad_norm": 6.03125,
"learning_rate": 4.326241134751774e-06,
"loss": 0.6532,
"step": 4240
},
{
"epoch": 0.5700521878763549,
"grad_norm": 6.375,
"learning_rate": 4.299478121236452e-06,
"loss": 0.6136,
"step": 4260
},
{
"epoch": 0.5727284892278871,
"grad_norm": 11.9375,
"learning_rate": 4.27271510772113e-06,
"loss": 0.6101,
"step": 4280
},
{
"epoch": 0.5754047905794193,
"grad_norm": 57.25,
"learning_rate": 4.245952094205808e-06,
"loss": 0.6618,
"step": 4300
},
{
"epoch": 0.5780810919309515,
"grad_norm": 40.0,
"learning_rate": 4.219189080690486e-06,
"loss": 0.6603,
"step": 4320
},
{
"epoch": 0.5807573932824837,
"grad_norm": 8.0,
"learning_rate": 4.192426067175164e-06,
"loss": 0.5636,
"step": 4340
},
{
"epoch": 0.5834336946340158,
"grad_norm": 10.6875,
"learning_rate": 4.165663053659842e-06,
"loss": 0.5962,
"step": 4360
},
{
"epoch": 0.586109995985548,
"grad_norm": 17.0,
"learning_rate": 4.13890004014452e-06,
"loss": 0.5037,
"step": 4380
},
{
"epoch": 0.5887862973370801,
"grad_norm": 4.40625,
"learning_rate": 4.112137026629199e-06,
"loss": 0.6774,
"step": 4400
},
{
"epoch": 0.5914625986886123,
"grad_norm": 18.75,
"learning_rate": 4.085374013113877e-06,
"loss": 0.681,
"step": 4420
},
{
"epoch": 0.5941389000401445,
"grad_norm": 15.875,
"learning_rate": 4.058610999598555e-06,
"loss": 0.5419,
"step": 4440
},
{
"epoch": 0.5968152013916767,
"grad_norm": 11.75,
"learning_rate": 4.031847986083233e-06,
"loss": 0.6745,
"step": 4460
},
{
"epoch": 0.5994915027432088,
"grad_norm": 9.375,
"learning_rate": 4.005084972567911e-06,
"loss": 0.5266,
"step": 4480
},
{
"epoch": 0.602167804094741,
"grad_norm": 8.0,
"learning_rate": 3.97832195905259e-06,
"loss": 0.6198,
"step": 4500
},
{
"epoch": 0.6048441054462732,
"grad_norm": 7.90625,
"learning_rate": 3.951558945537268e-06,
"loss": 0.6739,
"step": 4520
},
{
"epoch": 0.6075204067978054,
"grad_norm": 5.625,
"learning_rate": 3.924795932021946e-06,
"loss": 0.6192,
"step": 4540
},
{
"epoch": 0.6101967081493376,
"grad_norm": 7.9375,
"learning_rate": 3.898032918506624e-06,
"loss": 0.548,
"step": 4560
},
{
"epoch": 0.6128730095008698,
"grad_norm": 20.875,
"learning_rate": 3.871269904991302e-06,
"loss": 0.5932,
"step": 4580
},
{
"epoch": 0.6155493108524019,
"grad_norm": 13.8125,
"learning_rate": 3.844506891475981e-06,
"loss": 0.5685,
"step": 4600
},
{
"epoch": 0.6182256122039341,
"grad_norm": 6.71875,
"learning_rate": 3.817743877960659e-06,
"loss": 0.5804,
"step": 4620
},
{
"epoch": 0.6209019135554663,
"grad_norm": 8.375,
"learning_rate": 3.7909808644453367e-06,
"loss": 0.5648,
"step": 4640
},
{
"epoch": 0.6235782149069985,
"grad_norm": 5.875,
"learning_rate": 3.7642178509300147e-06,
"loss": 0.6447,
"step": 4660
},
{
"epoch": 0.6262545162585307,
"grad_norm": 29.625,
"learning_rate": 3.737454837414693e-06,
"loss": 0.6378,
"step": 4680
},
{
"epoch": 0.6289308176100629,
"grad_norm": 5.40625,
"learning_rate": 3.710691823899371e-06,
"loss": 0.6327,
"step": 4700
},
{
"epoch": 0.631607118961595,
"grad_norm": 5.3125,
"learning_rate": 3.68392881038405e-06,
"loss": 0.6192,
"step": 4720
},
{
"epoch": 0.6342834203131272,
"grad_norm": 9.8125,
"learning_rate": 3.657165796868728e-06,
"loss": 0.5407,
"step": 4740
},
{
"epoch": 0.6369597216646594,
"grad_norm": 10.5,
"learning_rate": 3.6304027833534063e-06,
"loss": 0.6072,
"step": 4760
},
{
"epoch": 0.6396360230161916,
"grad_norm": 6.40625,
"learning_rate": 3.6036397698380843e-06,
"loss": 0.5386,
"step": 4780
},
{
"epoch": 0.6423123243677238,
"grad_norm": 13.9375,
"learning_rate": 3.5768767563227623e-06,
"loss": 0.5386,
"step": 4800
},
{
"epoch": 0.644988625719256,
"grad_norm": 7.03125,
"learning_rate": 3.5501137428074407e-06,
"loss": 0.6285,
"step": 4820
},
{
"epoch": 0.6476649270707882,
"grad_norm": 42.5,
"learning_rate": 3.5233507292921187e-06,
"loss": 0.5539,
"step": 4840
},
{
"epoch": 0.6503412284223203,
"grad_norm": 10.875,
"learning_rate": 3.496587715776797e-06,
"loss": 0.6209,
"step": 4860
},
{
"epoch": 0.6530175297738525,
"grad_norm": 9.1875,
"learning_rate": 3.469824702261475e-06,
"loss": 0.6097,
"step": 4880
},
{
"epoch": 0.6556938311253847,
"grad_norm": 42.0,
"learning_rate": 3.443061688746153e-06,
"loss": 0.7998,
"step": 4900
},
{
"epoch": 0.6583701324769169,
"grad_norm": 8.375,
"learning_rate": 3.4162986752308315e-06,
"loss": 0.5952,
"step": 4920
},
{
"epoch": 0.6610464338284491,
"grad_norm": 8.25,
"learning_rate": 3.3895356617155095e-06,
"loss": 0.6066,
"step": 4940
},
{
"epoch": 0.6637227351799813,
"grad_norm": 9.6875,
"learning_rate": 3.362772648200188e-06,
"loss": 0.5866,
"step": 4960
},
{
"epoch": 0.6663990365315134,
"grad_norm": 15.5,
"learning_rate": 3.336009634684866e-06,
"loss": 0.6064,
"step": 4980
},
{
"epoch": 0.6690753378830456,
"grad_norm": 6.0,
"learning_rate": 3.309246621169544e-06,
"loss": 0.6234,
"step": 5000
}
],
"logging_steps": 20,
"max_steps": 7473,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 5000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 2.164797997056e+16,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}