|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 2410, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0004149377593360996, |
|
"grad_norm": 24.125060436731786, |
|
"learning_rate": 4.1493775933609963e-08, |
|
"loss": 1.4564, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002074688796680498, |
|
"grad_norm": 25.720644864001628, |
|
"learning_rate": 2.074688796680498e-07, |
|
"loss": 1.441, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.004149377593360996, |
|
"grad_norm": 16.740031980768023, |
|
"learning_rate": 4.149377593360996e-07, |
|
"loss": 1.3788, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.006224066390041493, |
|
"grad_norm": 8.090924427641259, |
|
"learning_rate": 6.224066390041494e-07, |
|
"loss": 1.2536, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.008298755186721992, |
|
"grad_norm": 10.750523135164151, |
|
"learning_rate": 8.298755186721992e-07, |
|
"loss": 1.1494, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01037344398340249, |
|
"grad_norm": 4.4260704046823305, |
|
"learning_rate": 1.037344398340249e-06, |
|
"loss": 1.0322, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.012448132780082987, |
|
"grad_norm": 3.6028288376202897, |
|
"learning_rate": 1.2448132780082988e-06, |
|
"loss": 1.0097, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.014522821576763486, |
|
"grad_norm": 3.3492101537066366, |
|
"learning_rate": 1.4522821576763488e-06, |
|
"loss": 0.9535, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.016597510373443983, |
|
"grad_norm": 3.156893207686817, |
|
"learning_rate": 1.6597510373443984e-06, |
|
"loss": 0.936, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01867219917012448, |
|
"grad_norm": 3.0839350676364625, |
|
"learning_rate": 1.8672199170124482e-06, |
|
"loss": 0.9338, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.02074688796680498, |
|
"grad_norm": 3.253051855089456, |
|
"learning_rate": 2.074688796680498e-06, |
|
"loss": 0.9331, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.022821576763485476, |
|
"grad_norm": 2.9754277246213143, |
|
"learning_rate": 2.282157676348548e-06, |
|
"loss": 0.91, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.024896265560165973, |
|
"grad_norm": 3.0041311604910343, |
|
"learning_rate": 2.4896265560165977e-06, |
|
"loss": 0.9078, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.026970954356846474, |
|
"grad_norm": 2.922982455654269, |
|
"learning_rate": 2.6970954356846475e-06, |
|
"loss": 0.8897, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.029045643153526972, |
|
"grad_norm": 3.1076397124321664, |
|
"learning_rate": 2.9045643153526977e-06, |
|
"loss": 0.8842, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03112033195020747, |
|
"grad_norm": 3.116640733229032, |
|
"learning_rate": 3.112033195020747e-06, |
|
"loss": 0.8832, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.03319502074688797, |
|
"grad_norm": 3.065455414087227, |
|
"learning_rate": 3.319502074688797e-06, |
|
"loss": 0.8849, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.035269709543568464, |
|
"grad_norm": 6.210638253556159, |
|
"learning_rate": 3.526970954356847e-06, |
|
"loss": 0.8531, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.03734439834024896, |
|
"grad_norm": 3.1067057770748527, |
|
"learning_rate": 3.7344398340248965e-06, |
|
"loss": 0.8743, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.03941908713692946, |
|
"grad_norm": 3.2092926065349943, |
|
"learning_rate": 3.941908713692946e-06, |
|
"loss": 0.8682, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.04149377593360996, |
|
"grad_norm": 3.2868841866625726, |
|
"learning_rate": 4.149377593360996e-06, |
|
"loss": 0.8654, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.043568464730290454, |
|
"grad_norm": 3.173912450520862, |
|
"learning_rate": 4.356846473029046e-06, |
|
"loss": 0.8711, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.04564315352697095, |
|
"grad_norm": 3.292340624512203, |
|
"learning_rate": 4.564315352697096e-06, |
|
"loss": 0.865, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04771784232365145, |
|
"grad_norm": 3.237667508827995, |
|
"learning_rate": 4.771784232365146e-06, |
|
"loss": 0.857, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.04979253112033195, |
|
"grad_norm": 2.993376111424828, |
|
"learning_rate": 4.979253112033195e-06, |
|
"loss": 0.8609, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.05186721991701245, |
|
"grad_norm": 3.0144215159861174, |
|
"learning_rate": 5.1867219917012455e-06, |
|
"loss": 0.8506, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.05394190871369295, |
|
"grad_norm": 3.1651885203236017, |
|
"learning_rate": 5.394190871369295e-06, |
|
"loss": 0.8469, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.056016597510373446, |
|
"grad_norm": 2.993815126331143, |
|
"learning_rate": 5.601659751037345e-06, |
|
"loss": 0.8525, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.058091286307053944, |
|
"grad_norm": 3.213115534992711, |
|
"learning_rate": 5.809128630705395e-06, |
|
"loss": 0.8695, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.06016597510373444, |
|
"grad_norm": 2.9733047105138644, |
|
"learning_rate": 6.016597510373444e-06, |
|
"loss": 0.8284, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.06224066390041494, |
|
"grad_norm": 3.33589443798142, |
|
"learning_rate": 6.224066390041494e-06, |
|
"loss": 0.8425, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.06431535269709543, |
|
"grad_norm": 3.4393756770920025, |
|
"learning_rate": 6.431535269709544e-06, |
|
"loss": 0.8292, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.06639004149377593, |
|
"grad_norm": 3.0186499716075503, |
|
"learning_rate": 6.639004149377594e-06, |
|
"loss": 0.8432, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.06846473029045644, |
|
"grad_norm": 3.062428850956398, |
|
"learning_rate": 6.846473029045644e-06, |
|
"loss": 0.8353, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.07053941908713693, |
|
"grad_norm": 3.055883803082857, |
|
"learning_rate": 7.053941908713694e-06, |
|
"loss": 0.83, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.07261410788381743, |
|
"grad_norm": 3.009902849561278, |
|
"learning_rate": 7.261410788381743e-06, |
|
"loss": 0.8306, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.07468879668049792, |
|
"grad_norm": 3.067609683396737, |
|
"learning_rate": 7.468879668049793e-06, |
|
"loss": 0.8174, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.07676348547717843, |
|
"grad_norm": 3.24872200689574, |
|
"learning_rate": 7.676348547717844e-06, |
|
"loss": 0.8067, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.07883817427385892, |
|
"grad_norm": 3.0109511105630813, |
|
"learning_rate": 7.883817427385892e-06, |
|
"loss": 0.819, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.08091286307053942, |
|
"grad_norm": 2.9825130430999196, |
|
"learning_rate": 8.091286307053943e-06, |
|
"loss": 0.8307, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.08298755186721991, |
|
"grad_norm": 3.2721517170057575, |
|
"learning_rate": 8.298755186721992e-06, |
|
"loss": 0.8181, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.08506224066390042, |
|
"grad_norm": 3.1295190695677224, |
|
"learning_rate": 8.506224066390042e-06, |
|
"loss": 0.8194, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.08713692946058091, |
|
"grad_norm": 3.0807844478856885, |
|
"learning_rate": 8.713692946058093e-06, |
|
"loss": 0.8245, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.08921161825726141, |
|
"grad_norm": 3.004937735529991, |
|
"learning_rate": 8.921161825726142e-06, |
|
"loss": 0.8053, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.0912863070539419, |
|
"grad_norm": 3.0575713009234597, |
|
"learning_rate": 9.128630705394191e-06, |
|
"loss": 0.8342, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.09336099585062241, |
|
"grad_norm": 3.2791342335128433, |
|
"learning_rate": 9.33609958506224e-06, |
|
"loss": 0.8129, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.0954356846473029, |
|
"grad_norm": 3.1149247799731916, |
|
"learning_rate": 9.543568464730292e-06, |
|
"loss": 0.826, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.0975103734439834, |
|
"grad_norm": 3.03634315532753, |
|
"learning_rate": 9.751037344398341e-06, |
|
"loss": 0.7986, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.0995850622406639, |
|
"grad_norm": 2.9801592123411536, |
|
"learning_rate": 9.95850622406639e-06, |
|
"loss": 0.8093, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.1016597510373444, |
|
"grad_norm": 3.060217014440955, |
|
"learning_rate": 9.999916085034977e-06, |
|
"loss": 0.8062, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.1037344398340249, |
|
"grad_norm": 3.0183415263657314, |
|
"learning_rate": 9.999575185316994e-06, |
|
"loss": 0.7941, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.10580912863070539, |
|
"grad_norm": 2.9090788452988368, |
|
"learning_rate": 9.998972074026074e-06, |
|
"loss": 0.8192, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.1078838174273859, |
|
"grad_norm": 3.1591958629070085, |
|
"learning_rate": 9.998106782793455e-06, |
|
"loss": 0.809, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.10995850622406639, |
|
"grad_norm": 2.9060719717091663, |
|
"learning_rate": 9.996979357000869e-06, |
|
"loss": 0.8089, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.11203319502074689, |
|
"grad_norm": 2.977459915322296, |
|
"learning_rate": 9.995589855778159e-06, |
|
"loss": 0.7924, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.11410788381742738, |
|
"grad_norm": 3.0060551165049265, |
|
"learning_rate": 9.993938352000174e-06, |
|
"loss": 0.8048, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.11618257261410789, |
|
"grad_norm": 3.095190060871042, |
|
"learning_rate": 9.992024932282955e-06, |
|
"loss": 0.7982, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.11825726141078838, |
|
"grad_norm": 2.8752680951865517, |
|
"learning_rate": 9.989849696979188e-06, |
|
"loss": 0.8078, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.12033195020746888, |
|
"grad_norm": 3.0955847953148456, |
|
"learning_rate": 9.987412760172939e-06, |
|
"loss": 0.8299, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.12240663900414937, |
|
"grad_norm": 3.0679937910779183, |
|
"learning_rate": 9.984714249673676e-06, |
|
"loss": 0.7934, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.12448132780082988, |
|
"grad_norm": 2.85972990273023, |
|
"learning_rate": 9.981754307009556e-06, |
|
"loss": 0.7853, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.12655601659751037, |
|
"grad_norm": 2.867471100837704, |
|
"learning_rate": 9.978533087420015e-06, |
|
"loss": 0.7794, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.12863070539419086, |
|
"grad_norm": 2.752209819851923, |
|
"learning_rate": 9.97505075984762e-06, |
|
"loss": 0.7969, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.13070539419087138, |
|
"grad_norm": 2.786720425865695, |
|
"learning_rate": 9.971307506929202e-06, |
|
"loss": 0.7928, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.13278008298755187, |
|
"grad_norm": 2.830559616569594, |
|
"learning_rate": 9.967303524986294e-06, |
|
"loss": 0.7716, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.13485477178423236, |
|
"grad_norm": 2.974198391481815, |
|
"learning_rate": 9.963039024014811e-06, |
|
"loss": 0.7735, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.13692946058091288, |
|
"grad_norm": 2.7461397338315745, |
|
"learning_rate": 9.958514227674065e-06, |
|
"loss": 0.7713, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.13900414937759337, |
|
"grad_norm": 2.923716802390218, |
|
"learning_rate": 9.953729373275008e-06, |
|
"loss": 0.7742, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.14107883817427386, |
|
"grad_norm": 2.8767580278019915, |
|
"learning_rate": 9.9486847117678e-06, |
|
"loss": 0.7811, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.14315352697095435, |
|
"grad_norm": 2.7292776037908806, |
|
"learning_rate": 9.943380507728647e-06, |
|
"loss": 0.7498, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.14522821576763487, |
|
"grad_norm": 2.7779230884935626, |
|
"learning_rate": 9.937817039345924e-06, |
|
"loss": 0.7837, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.14730290456431536, |
|
"grad_norm": 2.8195872317743294, |
|
"learning_rate": 9.931994598405576e-06, |
|
"loss": 0.7698, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.14937759336099585, |
|
"grad_norm": 2.774597685088389, |
|
"learning_rate": 9.925913490275834e-06, |
|
"loss": 0.7563, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.15145228215767634, |
|
"grad_norm": 3.2313550972927603, |
|
"learning_rate": 9.919574033891175e-06, |
|
"loss": 0.7514, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.15352697095435686, |
|
"grad_norm": 2.951208208104408, |
|
"learning_rate": 9.912976561735617e-06, |
|
"loss": 0.7624, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.15560165975103735, |
|
"grad_norm": 2.792776630162223, |
|
"learning_rate": 9.906121419825269e-06, |
|
"loss": 0.7528, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.15767634854771784, |
|
"grad_norm": 3.0013169289174035, |
|
"learning_rate": 9.899008967690185e-06, |
|
"loss": 0.7579, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.15975103734439833, |
|
"grad_norm": 2.93889239671079, |
|
"learning_rate": 9.891639578355511e-06, |
|
"loss": 0.7473, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.16182572614107885, |
|
"grad_norm": 2.8094165441216834, |
|
"learning_rate": 9.88401363832192e-06, |
|
"loss": 0.7461, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.16390041493775934, |
|
"grad_norm": 2.8511200527170826, |
|
"learning_rate": 9.87613154754534e-06, |
|
"loss": 0.7371, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.16597510373443983, |
|
"grad_norm": 2.7083420207399054, |
|
"learning_rate": 9.867993719415974e-06, |
|
"loss": 0.7483, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.16804979253112035, |
|
"grad_norm": 4.619767137175253, |
|
"learning_rate": 9.859600580736632e-06, |
|
"loss": 0.7439, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.17012448132780084, |
|
"grad_norm": 2.773898975248707, |
|
"learning_rate": 9.850952571700332e-06, |
|
"loss": 0.7554, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.17219917012448133, |
|
"grad_norm": 3.0806711904194843, |
|
"learning_rate": 9.842050145867219e-06, |
|
"loss": 0.7166, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.17427385892116182, |
|
"grad_norm": 2.752747132588949, |
|
"learning_rate": 9.832893770140778e-06, |
|
"loss": 0.7197, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.17634854771784234, |
|
"grad_norm": 2.7451571067105007, |
|
"learning_rate": 9.823483924743348e-06, |
|
"loss": 0.7354, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.17842323651452283, |
|
"grad_norm": 2.868073203952391, |
|
"learning_rate": 9.813821103190932e-06, |
|
"loss": 0.7414, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.18049792531120332, |
|
"grad_norm": 2.9147343149718323, |
|
"learning_rate": 9.803905812267317e-06, |
|
"loss": 0.7213, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.1825726141078838, |
|
"grad_norm": 2.943490559854656, |
|
"learning_rate": 9.793738571997488e-06, |
|
"loss": 0.7157, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.18464730290456433, |
|
"grad_norm": 2.786373198270263, |
|
"learning_rate": 9.783319915620365e-06, |
|
"loss": 0.7211, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.18672199170124482, |
|
"grad_norm": 2.828672618199542, |
|
"learning_rate": 9.772650389560829e-06, |
|
"loss": 0.7407, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.1887966804979253, |
|
"grad_norm": 2.8503278928189597, |
|
"learning_rate": 9.761730553401067e-06, |
|
"loss": 0.7198, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.1908713692946058, |
|
"grad_norm": 2.6415470476311804, |
|
"learning_rate": 9.750560979851222e-06, |
|
"loss": 0.6985, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.19294605809128632, |
|
"grad_norm": 2.7659371682656864, |
|
"learning_rate": 9.739142254719351e-06, |
|
"loss": 0.7173, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.1950207468879668, |
|
"grad_norm": 2.878286864671364, |
|
"learning_rate": 9.727474976880718e-06, |
|
"loss": 0.7095, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.1970954356846473, |
|
"grad_norm": 2.8820399165119297, |
|
"learning_rate": 9.715559758246363e-06, |
|
"loss": 0.7395, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.1991701244813278, |
|
"grad_norm": 2.990832600540869, |
|
"learning_rate": 9.703397223731028e-06, |
|
"loss": 0.7231, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.2012448132780083, |
|
"grad_norm": 2.9086624278397584, |
|
"learning_rate": 9.690988011220367e-06, |
|
"loss": 0.7232, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.2033195020746888, |
|
"grad_norm": 3.0668269222764053, |
|
"learning_rate": 9.678332771537506e-06, |
|
"loss": 0.7071, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.2053941908713693, |
|
"grad_norm": 2.6687504757413394, |
|
"learning_rate": 9.665432168408895e-06, |
|
"loss": 0.7091, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.2074688796680498, |
|
"grad_norm": 2.832649973042063, |
|
"learning_rate": 9.652286878429508e-06, |
|
"loss": 0.6909, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.2095435684647303, |
|
"grad_norm": 2.8561721336631103, |
|
"learning_rate": 9.638897591027355e-06, |
|
"loss": 0.7018, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.21161825726141079, |
|
"grad_norm": 2.69554265510261, |
|
"learning_rate": 9.625265008427317e-06, |
|
"loss": 0.681, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.21369294605809128, |
|
"grad_norm": 2.9431488000559964, |
|
"learning_rate": 9.61138984561433e-06, |
|
"loss": 0.6728, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.2157676348547718, |
|
"grad_norm": 2.83969630330935, |
|
"learning_rate": 9.597272830295877e-06, |
|
"loss": 0.6868, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.21784232365145229, |
|
"grad_norm": 2.674607646561167, |
|
"learning_rate": 9.582914702863816e-06, |
|
"loss": 0.7077, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.21991701244813278, |
|
"grad_norm": 2.725112394328792, |
|
"learning_rate": 9.568316216355569e-06, |
|
"loss": 0.7025, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.22199170124481327, |
|
"grad_norm": 2.884902514990453, |
|
"learning_rate": 9.553478136414606e-06, |
|
"loss": 0.6831, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.22406639004149378, |
|
"grad_norm": 2.733433429575846, |
|
"learning_rate": 9.538401241250302e-06, |
|
"loss": 0.6832, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.22614107883817428, |
|
"grad_norm": 2.581183991932393, |
|
"learning_rate": 9.523086321597123e-06, |
|
"loss": 0.6734, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.22821576763485477, |
|
"grad_norm": 2.8734721689801, |
|
"learning_rate": 9.507534180673142e-06, |
|
"loss": 0.6864, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.23029045643153526, |
|
"grad_norm": 2.663535237541605, |
|
"learning_rate": 9.49174563413793e-06, |
|
"loss": 0.6466, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.23236514522821577, |
|
"grad_norm": 2.7118413673799013, |
|
"learning_rate": 9.475721510049765e-06, |
|
"loss": 0.6837, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.23443983402489627, |
|
"grad_norm": 2.9950923382886323, |
|
"learning_rate": 9.459462648822209e-06, |
|
"loss": 0.6726, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.23651452282157676, |
|
"grad_norm": 2.974293502120668, |
|
"learning_rate": 9.442969903180021e-06, |
|
"loss": 0.6738, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.23858921161825727, |
|
"grad_norm": 3.1783580667348548, |
|
"learning_rate": 9.426244138114456e-06, |
|
"loss": 0.6672, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.24066390041493776, |
|
"grad_norm": 2.6597049002060285, |
|
"learning_rate": 9.409286230837876e-06, |
|
"loss": 0.653, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.24273858921161826, |
|
"grad_norm": 2.7236286099967266, |
|
"learning_rate": 9.39209707073775e-06, |
|
"loss": 0.6711, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.24481327800829875, |
|
"grad_norm": 2.7372295677421916, |
|
"learning_rate": 9.37467755933002e-06, |
|
"loss": 0.6559, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.24688796680497926, |
|
"grad_norm": 2.678242556214004, |
|
"learning_rate": 9.357028610211802e-06, |
|
"loss": 0.6388, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.24896265560165975, |
|
"grad_norm": 2.7817408113891497, |
|
"learning_rate": 9.339151149013483e-06, |
|
"loss": 0.6614, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.25103734439834025, |
|
"grad_norm": 2.7229156677150708, |
|
"learning_rate": 9.32104611335017e-06, |
|
"loss": 0.6622, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.25311203319502074, |
|
"grad_norm": 2.786021124460266, |
|
"learning_rate": 9.302714452772515e-06, |
|
"loss": 0.6362, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.2551867219917012, |
|
"grad_norm": 2.718401897174051, |
|
"learning_rate": 9.284157128716916e-06, |
|
"loss": 0.6575, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.2572614107883817, |
|
"grad_norm": 2.748648328603438, |
|
"learning_rate": 9.265375114455091e-06, |
|
"loss": 0.6576, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.25933609958506226, |
|
"grad_norm": 2.618511190024933, |
|
"learning_rate": 9.246369395043033e-06, |
|
"loss": 0.6395, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.26141078838174275, |
|
"grad_norm": 2.648505007931354, |
|
"learning_rate": 9.227140967269348e-06, |
|
"loss": 0.6365, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.26348547717842324, |
|
"grad_norm": 2.699335425831484, |
|
"learning_rate": 9.20769083960298e-06, |
|
"loss": 0.6227, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.26556016597510373, |
|
"grad_norm": 2.734052134594422, |
|
"learning_rate": 9.188020032140308e-06, |
|
"loss": 0.6482, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.2676348547717842, |
|
"grad_norm": 2.653396602744242, |
|
"learning_rate": 9.168129576551665e-06, |
|
"loss": 0.6436, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.2697095435684647, |
|
"grad_norm": 2.6329020514792347, |
|
"learning_rate": 9.148020516027207e-06, |
|
"loss": 0.6469, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.2717842323651452, |
|
"grad_norm": 2.77120399617531, |
|
"learning_rate": 9.127693905222223e-06, |
|
"loss": 0.6523, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.27385892116182575, |
|
"grad_norm": 2.5777407355282915, |
|
"learning_rate": 9.107150810201805e-06, |
|
"loss": 0.6128, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.27593360995850624, |
|
"grad_norm": 2.600188088942271, |
|
"learning_rate": 9.086392308384946e-06, |
|
"loss": 0.6328, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.27800829875518673, |
|
"grad_norm": 2.782681918170527, |
|
"learning_rate": 9.065419488488029e-06, |
|
"loss": 0.6375, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.2800829875518672, |
|
"grad_norm": 2.7119527542163224, |
|
"learning_rate": 9.044233450467728e-06, |
|
"loss": 0.6518, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.2821576763485477, |
|
"grad_norm": 2.5779208598315893, |
|
"learning_rate": 9.022835305463322e-06, |
|
"loss": 0.6096, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.2842323651452282, |
|
"grad_norm": 2.732004581484615, |
|
"learning_rate": 9.001226175738409e-06, |
|
"loss": 0.6256, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.2863070539419087, |
|
"grad_norm": 2.9082987367989754, |
|
"learning_rate": 8.979407194622062e-06, |
|
"loss": 0.624, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.2883817427385892, |
|
"grad_norm": 2.870233461461753, |
|
"learning_rate": 8.95737950644938e-06, |
|
"loss": 0.6264, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.29045643153526973, |
|
"grad_norm": 2.6890783012042476, |
|
"learning_rate": 8.93514426650147e-06, |
|
"loss": 0.62, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.2925311203319502, |
|
"grad_norm": 2.769255492313238, |
|
"learning_rate": 8.912702640944862e-06, |
|
"loss": 0.6068, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.2946058091286307, |
|
"grad_norm": 2.6949064089337886, |
|
"learning_rate": 8.89005580677034e-06, |
|
"loss": 0.6203, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.2966804979253112, |
|
"grad_norm": 2.755051228153455, |
|
"learning_rate": 8.867204951731227e-06, |
|
"loss": 0.6142, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.2987551867219917, |
|
"grad_norm": 2.62981630367938, |
|
"learning_rate": 8.844151274281074e-06, |
|
"loss": 0.6189, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.3008298755186722, |
|
"grad_norm": 2.7265577070263465, |
|
"learning_rate": 8.820895983510813e-06, |
|
"loss": 0.6, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.3029045643153527, |
|
"grad_norm": 2.7817352989102333, |
|
"learning_rate": 8.797440299085344e-06, |
|
"loss": 0.6108, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.3049792531120332, |
|
"grad_norm": 3.100231240034723, |
|
"learning_rate": 8.77378545117957e-06, |
|
"loss": 0.5984, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.3070539419087137, |
|
"grad_norm": 2.7380766370471212, |
|
"learning_rate": 8.74993268041387e-06, |
|
"loss": 0.6071, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.3091286307053942, |
|
"grad_norm": 2.5723430338949647, |
|
"learning_rate": 8.725883237789046e-06, |
|
"loss": 0.6006, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.3112033195020747, |
|
"grad_norm": 2.6247659030224457, |
|
"learning_rate": 8.701638384620694e-06, |
|
"loss": 0.6057, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.3132780082987552, |
|
"grad_norm": 2.8261376918581673, |
|
"learning_rate": 8.677199392473068e-06, |
|
"loss": 0.5909, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.3153526970954357, |
|
"grad_norm": 2.687403682214452, |
|
"learning_rate": 8.652567543092385e-06, |
|
"loss": 0.5972, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.31742738589211617, |
|
"grad_norm": 2.583943942287143, |
|
"learning_rate": 8.627744128339599e-06, |
|
"loss": 0.5921, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.31950207468879666, |
|
"grad_norm": 2.5051270540002513, |
|
"learning_rate": 8.602730450122648e-06, |
|
"loss": 0.5874, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.3215767634854772, |
|
"grad_norm": 2.516887136317417, |
|
"learning_rate": 8.577527820328176e-06, |
|
"loss": 0.5886, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.3236514522821577, |
|
"grad_norm": 2.640501739376562, |
|
"learning_rate": 8.552137560752728e-06, |
|
"loss": 0.6084, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.3257261410788382, |
|
"grad_norm": 2.6052956415011375, |
|
"learning_rate": 8.526561003033424e-06, |
|
"loss": 0.621, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.3278008298755187, |
|
"grad_norm": 2.737125142864526, |
|
"learning_rate": 8.50079948857812e-06, |
|
"loss": 0.5653, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.32987551867219916, |
|
"grad_norm": 2.585419509341338, |
|
"learning_rate": 8.474854368495055e-06, |
|
"loss": 0.581, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.33195020746887965, |
|
"grad_norm": 2.737359013797796, |
|
"learning_rate": 8.44872700352199e-06, |
|
"loss": 0.5854, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.33402489626556015, |
|
"grad_norm": 2.746985747973755, |
|
"learning_rate": 8.422418763954841e-06, |
|
"loss": 0.5838, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.3360995850622407, |
|
"grad_norm": 2.539317796466498, |
|
"learning_rate": 8.395931029575817e-06, |
|
"loss": 0.5869, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.3381742738589212, |
|
"grad_norm": 2.7037637662698883, |
|
"learning_rate": 8.369265189581048e-06, |
|
"loss": 0.5706, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.34024896265560167, |
|
"grad_norm": 2.745265242396823, |
|
"learning_rate": 8.342422642507727e-06, |
|
"loss": 0.5976, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.34232365145228216, |
|
"grad_norm": 2.4976820679304064, |
|
"learning_rate": 8.31540479616076e-06, |
|
"loss": 0.5623, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.34439834024896265, |
|
"grad_norm": 2.668409048058494, |
|
"learning_rate": 8.288213067538936e-06, |
|
"loss": 0.5826, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.34647302904564314, |
|
"grad_norm": 2.6848352140022107, |
|
"learning_rate": 8.260848882760616e-06, |
|
"loss": 0.5866, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.34854771784232363, |
|
"grad_norm": 2.66195019056847, |
|
"learning_rate": 8.233313676988917e-06, |
|
"loss": 0.586, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.3506224066390041, |
|
"grad_norm": 2.528461357247491, |
|
"learning_rate": 8.205608894356461e-06, |
|
"loss": 0.5852, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.35269709543568467, |
|
"grad_norm": 2.660016794594734, |
|
"learning_rate": 8.177735987889628e-06, |
|
"loss": 0.564, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.35477178423236516, |
|
"grad_norm": 2.759588415440948, |
|
"learning_rate": 8.149696419432352e-06, |
|
"loss": 0.5654, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.35684647302904565, |
|
"grad_norm": 2.613574353143177, |
|
"learning_rate": 8.121491659569442e-06, |
|
"loss": 0.577, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.35892116182572614, |
|
"grad_norm": 2.7120664267900363, |
|
"learning_rate": 8.093123187549475e-06, |
|
"loss": 0.5521, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.36099585062240663, |
|
"grad_norm": 2.8645366988908703, |
|
"learning_rate": 8.064592491207193e-06, |
|
"loss": 0.5506, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.3630705394190871, |
|
"grad_norm": 2.434549054568008, |
|
"learning_rate": 8.035901066885486e-06, |
|
"loss": 0.5542, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.3651452282157676, |
|
"grad_norm": 3.1125245016401366, |
|
"learning_rate": 8.007050419356898e-06, |
|
"loss": 0.5468, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.36721991701244816, |
|
"grad_norm": 2.573761130348133, |
|
"learning_rate": 7.978042061744728e-06, |
|
"loss": 0.5769, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.36929460580912865, |
|
"grad_norm": 2.69636084648785, |
|
"learning_rate": 7.94887751544365e-06, |
|
"loss": 0.555, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.37136929460580914, |
|
"grad_norm": 2.759462337302901, |
|
"learning_rate": 7.919558310039937e-06, |
|
"loss": 0.5621, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.37344398340248963, |
|
"grad_norm": 2.524652074579402, |
|
"learning_rate": 7.890085983231225e-06, |
|
"loss": 0.5595, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.3755186721991701, |
|
"grad_norm": 2.6055459072408347, |
|
"learning_rate": 7.860462080745884e-06, |
|
"loss": 0.5625, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.3775933609958506, |
|
"grad_norm": 2.690307321418706, |
|
"learning_rate": 7.830688156261927e-06, |
|
"loss": 0.5669, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.3796680497925311, |
|
"grad_norm": 2.4947604643037846, |
|
"learning_rate": 7.800765771325546e-06, |
|
"loss": 0.5513, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.3817427385892116, |
|
"grad_norm": 2.771189691944251, |
|
"learning_rate": 7.7706964952692e-06, |
|
"loss": 0.5545, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.38381742738589214, |
|
"grad_norm": 2.659352317761129, |
|
"learning_rate": 7.740481905129307e-06, |
|
"loss": 0.5395, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.38589211618257263, |
|
"grad_norm": 2.77065352307922, |
|
"learning_rate": 7.710123585563552e-06, |
|
"loss": 0.5318, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.3879668049792531, |
|
"grad_norm": 2.67268337504389, |
|
"learning_rate": 7.679623128767754e-06, |
|
"loss": 0.5602, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.3900414937759336, |
|
"grad_norm": 2.6130788687401534, |
|
"learning_rate": 7.648982134392378e-06, |
|
"loss": 0.5407, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.3921161825726141, |
|
"grad_norm": 2.478750734091722, |
|
"learning_rate": 7.618202209458623e-06, |
|
"loss": 0.5512, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.3941908713692946, |
|
"grad_norm": 2.5293381351156947, |
|
"learning_rate": 7.587284968274155e-06, |
|
"loss": 0.532, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.3962655601659751, |
|
"grad_norm": 2.669093722804951, |
|
"learning_rate": 7.556232032348429e-06, |
|
"loss": 0.5366, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.3983402489626556, |
|
"grad_norm": 2.632923957296542, |
|
"learning_rate": 7.5250450303076526e-06, |
|
"loss": 0.5457, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.4004149377593361, |
|
"grad_norm": 2.6179192159805043, |
|
"learning_rate": 7.49372559780937e-06, |
|
"loss": 0.5424, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.4024896265560166, |
|
"grad_norm": 3.2478337706512543, |
|
"learning_rate": 7.462275377456671e-06, |
|
"loss": 0.54, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.4045643153526971, |
|
"grad_norm": 2.68193222598923, |
|
"learning_rate": 7.430696018712049e-06, |
|
"loss": 0.5387, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.4066390041493776, |
|
"grad_norm": 2.595437581194786, |
|
"learning_rate": 7.398989177810889e-06, |
|
"loss": 0.5414, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.4087136929460581, |
|
"grad_norm": 2.4855222087038156, |
|
"learning_rate": 7.3671565176746025e-06, |
|
"loss": 0.5265, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.4107883817427386, |
|
"grad_norm": 2.508819128058194, |
|
"learning_rate": 7.335199707823415e-06, |
|
"loss": 0.5291, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.41286307053941906, |
|
"grad_norm": 2.531304182611605, |
|
"learning_rate": 7.3031204242888e-06, |
|
"loss": 0.52, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.4149377593360996, |
|
"grad_norm": 2.5517986316072396, |
|
"learning_rate": 7.270920349525584e-06, |
|
"loss": 0.5249, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.4170124481327801, |
|
"grad_norm": 2.7125215830864544, |
|
"learning_rate": 7.238601172323701e-06, |
|
"loss": 0.5188, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.4190871369294606, |
|
"grad_norm": 2.721968844943331, |
|
"learning_rate": 7.206164587719627e-06, |
|
"loss": 0.5486, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.4211618257261411, |
|
"grad_norm": 2.498830940304024, |
|
"learning_rate": 7.173612296907473e-06, |
|
"loss": 0.5094, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.42323651452282157, |
|
"grad_norm": 2.6420699168390125, |
|
"learning_rate": 7.1409460071497675e-06, |
|
"loss": 0.5369, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.42531120331950206, |
|
"grad_norm": 2.5735345561399106, |
|
"learning_rate": 7.108167431687917e-06, |
|
"loss": 0.5224, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.42738589211618255, |
|
"grad_norm": 2.553352808391161, |
|
"learning_rate": 7.075278289652349e-06, |
|
"loss": 0.5183, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.42946058091286304, |
|
"grad_norm": 2.5451816463588264, |
|
"learning_rate": 7.042280305972354e-06, |
|
"loss": 0.5115, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.4315352697095436, |
|
"grad_norm": 2.7764624685613324, |
|
"learning_rate": 7.009175211285611e-06, |
|
"loss": 0.5024, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.4336099585062241, |
|
"grad_norm": 2.4994000922678605, |
|
"learning_rate": 6.975964741847427e-06, |
|
"loss": 0.5168, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.43568464730290457, |
|
"grad_norm": 2.6265669713073736, |
|
"learning_rate": 6.942650639439678e-06, |
|
"loss": 0.5241, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.43775933609958506, |
|
"grad_norm": 2.551169790014525, |
|
"learning_rate": 6.9092346512794475e-06, |
|
"loss": 0.5292, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.43983402489626555, |
|
"grad_norm": 2.7490479683201756, |
|
"learning_rate": 6.875718529927404e-06, |
|
"loss": 0.5175, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.44190871369294604, |
|
"grad_norm": 2.589042476536066, |
|
"learning_rate": 6.8421040331958745e-06, |
|
"loss": 0.5136, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.44398340248962653, |
|
"grad_norm": 2.398762511763245, |
|
"learning_rate": 6.808392924056659e-06, |
|
"loss": 0.494, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.4460580912863071, |
|
"grad_norm": 2.4620482622430906, |
|
"learning_rate": 6.774586970548567e-06, |
|
"loss": 0.5142, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.44813278008298757, |
|
"grad_norm": 2.5811787236656705, |
|
"learning_rate": 6.7406879456846875e-06, |
|
"loss": 0.5179, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.45020746887966806, |
|
"grad_norm": 2.5874751704459067, |
|
"learning_rate": 6.7066976273594e-06, |
|
"loss": 0.5015, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.45228215767634855, |
|
"grad_norm": 2.6041890331283004, |
|
"learning_rate": 6.672617798255135e-06, |
|
"loss": 0.5187, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.45435684647302904, |
|
"grad_norm": 2.4385773021667014, |
|
"learning_rate": 6.63845024574887e-06, |
|
"loss": 0.5139, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.45643153526970953, |
|
"grad_norm": 2.6013087624703903, |
|
"learning_rate": 6.604196761818395e-06, |
|
"loss": 0.5151, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.45850622406639, |
|
"grad_norm": 2.507789701163704, |
|
"learning_rate": 6.5698591429483286e-06, |
|
"loss": 0.5055, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.4605809128630705, |
|
"grad_norm": 2.4497240381091894, |
|
"learning_rate": 6.535439190035884e-06, |
|
"loss": 0.4919, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.46265560165975106, |
|
"grad_norm": 2.526171786977918, |
|
"learning_rate": 6.5009387082964405e-06, |
|
"loss": 0.5108, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.46473029045643155, |
|
"grad_norm": 2.5707498945410596, |
|
"learning_rate": 6.466359507168849e-06, |
|
"loss": 0.4901, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.46680497925311204, |
|
"grad_norm": 2.5296178280519355, |
|
"learning_rate": 6.431703400220541e-06, |
|
"loss": 0.496, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.46887966804979253, |
|
"grad_norm": 3.370323732349876, |
|
"learning_rate": 6.396972205052407e-06, |
|
"loss": 0.5021, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.470954356846473, |
|
"grad_norm": 2.442922566613594, |
|
"learning_rate": 6.362167743203474e-06, |
|
"loss": 0.5088, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.4730290456431535, |
|
"grad_norm": 2.6508493698413047, |
|
"learning_rate": 6.327291840055365e-06, |
|
"loss": 0.5034, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.475103734439834, |
|
"grad_norm": 2.350322394886369, |
|
"learning_rate": 6.292346324736578e-06, |
|
"loss": 0.4892, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.47717842323651455, |
|
"grad_norm": 2.4571870424057414, |
|
"learning_rate": 6.2573330300265375e-06, |
|
"loss": 0.4873, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.47925311203319504, |
|
"grad_norm": 2.5404811185739464, |
|
"learning_rate": 6.22225379225948e-06, |
|
"loss": 0.5078, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.48132780082987553, |
|
"grad_norm": 2.4912415819262272, |
|
"learning_rate": 6.18711045122814e-06, |
|
"loss": 0.4999, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.483402489626556, |
|
"grad_norm": 2.4900364753655158, |
|
"learning_rate": 6.151904850087265e-06, |
|
"loss": 0.4973, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.4854771784232365, |
|
"grad_norm": 2.744009584631425, |
|
"learning_rate": 6.116638835256943e-06, |
|
"loss": 0.5012, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.487551867219917, |
|
"grad_norm": 2.495774980304088, |
|
"learning_rate": 6.081314256325762e-06, |
|
"loss": 0.4776, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.4896265560165975, |
|
"grad_norm": 2.6683119283640973, |
|
"learning_rate": 6.045932965953813e-06, |
|
"loss": 0.4934, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.491701244813278, |
|
"grad_norm": 2.5945008502082927, |
|
"learning_rate": 6.010496819775518e-06, |
|
"loss": 0.4778, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.49377593360995853, |
|
"grad_norm": 2.508630781477287, |
|
"learning_rate": 5.9750076763023025e-06, |
|
"loss": 0.4996, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.495850622406639, |
|
"grad_norm": 2.3857145720709956, |
|
"learning_rate": 5.939467396825137e-06, |
|
"loss": 0.4734, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.4979253112033195, |
|
"grad_norm": 2.440278471365285, |
|
"learning_rate": 5.903877845316906e-06, |
|
"loss": 0.4727, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 2.5440939022628215, |
|
"learning_rate": 5.8682408883346535e-06, |
|
"loss": 0.4819, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.5020746887966805, |
|
"grad_norm": 2.55089097987319, |
|
"learning_rate": 5.832558394921688e-06, |
|
"loss": 0.4813, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.504149377593361, |
|
"grad_norm": 2.5084373852958994, |
|
"learning_rate": 5.796832236509556e-06, |
|
"loss": 0.4805, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.5062240663900415, |
|
"grad_norm": 2.5202246258660774, |
|
"learning_rate": 5.761064286819895e-06, |
|
"loss": 0.4681, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.508298755186722, |
|
"grad_norm": 2.2907682658127757, |
|
"learning_rate": 5.725256421766158e-06, |
|
"loss": 0.465, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.5103734439834025, |
|
"grad_norm": 2.4810650126563787, |
|
"learning_rate": 5.689410519355226e-06, |
|
"loss": 0.4727, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.5124481327800829, |
|
"grad_norm": 2.3126556102699687, |
|
"learning_rate": 5.653528459588925e-06, |
|
"loss": 0.4685, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.5145228215767634, |
|
"grad_norm": 2.8161059758067646, |
|
"learning_rate": 5.617612124365411e-06, |
|
"loss": 0.4782, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.516597510373444, |
|
"grad_norm": 2.460454461172899, |
|
"learning_rate": 5.5816633973804766e-06, |
|
"loss": 0.4725, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.5186721991701245, |
|
"grad_norm": 2.5314852355237325, |
|
"learning_rate": 5.545684164028764e-06, |
|
"loss": 0.4629, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.520746887966805, |
|
"grad_norm": 2.413381971072284, |
|
"learning_rate": 5.509676311304869e-06, |
|
"loss": 0.4621, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.5228215767634855, |
|
"grad_norm": 2.4568884384653065, |
|
"learning_rate": 5.4736417277043865e-06, |
|
"loss": 0.4641, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.524896265560166, |
|
"grad_norm": 2.5430525728774995, |
|
"learning_rate": 5.4375823031248545e-06, |
|
"loss": 0.4608, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.5269709543568465, |
|
"grad_norm": 2.5499074836842417, |
|
"learning_rate": 5.401499928766644e-06, |
|
"loss": 0.4585, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.529045643153527, |
|
"grad_norm": 2.6183095454562757, |
|
"learning_rate": 5.365396497033763e-06, |
|
"loss": 0.4596, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.5311203319502075, |
|
"grad_norm": 2.4698682040573154, |
|
"learning_rate": 5.32927390143462e-06, |
|
"loss": 0.4678, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.533195020746888, |
|
"grad_norm": 2.492163480027872, |
|
"learning_rate": 5.293134036482697e-06, |
|
"loss": 0.4456, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.5352697095435685, |
|
"grad_norm": 2.590019804752622, |
|
"learning_rate": 5.256978797597202e-06, |
|
"loss": 0.46, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.5373443983402489, |
|
"grad_norm": 2.587584613385466, |
|
"learning_rate": 5.220810081003656e-06, |
|
"loss": 0.4452, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.5394190871369294, |
|
"grad_norm": 2.5553820105852703, |
|
"learning_rate": 5.184629783634441e-06, |
|
"loss": 0.4669, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.5414937759336099, |
|
"grad_norm": 2.5316219134992726, |
|
"learning_rate": 5.1484398030293135e-06, |
|
"loss": 0.4427, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.5435684647302904, |
|
"grad_norm": 2.38261182138307, |
|
"learning_rate": 5.112242037235885e-06, |
|
"loss": 0.4545, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.5456431535269709, |
|
"grad_norm": 2.4329470762572405, |
|
"learning_rate": 5.076038384710077e-06, |
|
"loss": 0.4355, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.5477178423236515, |
|
"grad_norm": 2.4125319997767978, |
|
"learning_rate": 5.039830744216548e-06, |
|
"loss": 0.4664, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.549792531120332, |
|
"grad_norm": 2.4953706452375655, |
|
"learning_rate": 5.003621014729113e-06, |
|
"loss": 0.4419, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.5518672199170125, |
|
"grad_norm": 2.422996969955173, |
|
"learning_rate": 4.967411095331149e-06, |
|
"loss": 0.4616, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.553941908713693, |
|
"grad_norm": 2.4302198476762222, |
|
"learning_rate": 4.931202885115994e-06, |
|
"loss": 0.4614, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.5560165975103735, |
|
"grad_norm": 2.3754573953652085, |
|
"learning_rate": 4.894998283087341e-06, |
|
"loss": 0.4434, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.558091286307054, |
|
"grad_norm": 2.6159761478664616, |
|
"learning_rate": 4.858799188059651e-06, |
|
"loss": 0.4484, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.5601659751037344, |
|
"grad_norm": 2.540935707705274, |
|
"learning_rate": 4.822607498558555e-06, |
|
"loss": 0.4617, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.5622406639004149, |
|
"grad_norm": 2.4320603166979726, |
|
"learning_rate": 4.786425112721293e-06, |
|
"loss": 0.449, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.5643153526970954, |
|
"grad_norm": 2.4700798191836277, |
|
"learning_rate": 4.75025392819715e-06, |
|
"loss": 0.4478, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.5663900414937759, |
|
"grad_norm": 2.4072388887626213, |
|
"learning_rate": 4.714095842047952e-06, |
|
"loss": 0.4435, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.5684647302904564, |
|
"grad_norm": 2.5048051939835703, |
|
"learning_rate": 4.677952750648544e-06, |
|
"loss": 0.4405, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.5705394190871369, |
|
"grad_norm": 2.5106558840148074, |
|
"learning_rate": 4.641826549587352e-06, |
|
"loss": 0.4566, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.5726141078838174, |
|
"grad_norm": 2.504901689251207, |
|
"learning_rate": 4.605719133566955e-06, |
|
"loss": 0.4453, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.5746887966804979, |
|
"grad_norm": 2.340222975660526, |
|
"learning_rate": 4.56963239630472e-06, |
|
"loss": 0.4405, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.5767634854771784, |
|
"grad_norm": 2.4499282223501764, |
|
"learning_rate": 4.533568230433477e-06, |
|
"loss": 0.4445, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.578838174273859, |
|
"grad_norm": 2.5670561935310485, |
|
"learning_rate": 4.497528527402262e-06, |
|
"loss": 0.4532, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.5809128630705395, |
|
"grad_norm": 2.3446371876460264, |
|
"learning_rate": 4.461515177377113e-06, |
|
"loss": 0.4325, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.58298755186722, |
|
"grad_norm": 2.5175147851360076, |
|
"learning_rate": 4.42553006914194e-06, |
|
"loss": 0.4498, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.5850622406639004, |
|
"grad_norm": 2.4962663431117122, |
|
"learning_rate": 4.3895750899994566e-06, |
|
"loss": 0.4393, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.5871369294605809, |
|
"grad_norm": 2.369862593209211, |
|
"learning_rate": 4.353652125672208e-06, |
|
"loss": 0.4212, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.5892116182572614, |
|
"grad_norm": 2.3773654322008637, |
|
"learning_rate": 4.317763060203665e-06, |
|
"loss": 0.4263, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.5912863070539419, |
|
"grad_norm": 2.380500234905241, |
|
"learning_rate": 4.281909775859411e-06, |
|
"loss": 0.4312, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.5933609958506224, |
|
"grad_norm": 2.345968436608752, |
|
"learning_rate": 4.246094153028426e-06, |
|
"loss": 0.4187, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.5954356846473029, |
|
"grad_norm": 2.400948214509533, |
|
"learning_rate": 4.210318070124465e-06, |
|
"loss": 0.418, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.5975103734439834, |
|
"grad_norm": 2.371660926773831, |
|
"learning_rate": 4.1745834034875435e-06, |
|
"loss": 0.4303, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.5995850622406639, |
|
"grad_norm": 2.476793126799922, |
|
"learning_rate": 4.138892027285525e-06, |
|
"loss": 0.4272, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.6016597510373444, |
|
"grad_norm": 2.3212962971421196, |
|
"learning_rate": 4.10324581341583e-06, |
|
"loss": 0.4274, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.6037344398340249, |
|
"grad_norm": 2.33053654370857, |
|
"learning_rate": 4.067646631407259e-06, |
|
"loss": 0.4222, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.6058091286307054, |
|
"grad_norm": 2.4160162947799577, |
|
"learning_rate": 4.0320963483219485e-06, |
|
"loss": 0.4391, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.6078838174273858, |
|
"grad_norm": 2.3789046213284584, |
|
"learning_rate": 3.996596828657437e-06, |
|
"loss": 0.4226, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.6099585062240664, |
|
"grad_norm": 2.5623488904932774, |
|
"learning_rate": 3.961149934248893e-06, |
|
"loss": 0.4245, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.6120331950207469, |
|
"grad_norm": 2.3691492683401307, |
|
"learning_rate": 3.925757524171455e-06, |
|
"loss": 0.4178, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.6141078838174274, |
|
"grad_norm": 2.54407142220368, |
|
"learning_rate": 3.8904214546427355e-06, |
|
"loss": 0.4331, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.6161825726141079, |
|
"grad_norm": 2.3971670050354232, |
|
"learning_rate": 3.855143578925468e-06, |
|
"loss": 0.418, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.6182572614107884, |
|
"grad_norm": 2.377993230868326, |
|
"learning_rate": 3.819925747230309e-06, |
|
"loss": 0.4223, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.6203319502074689, |
|
"grad_norm": 2.3704864112489967, |
|
"learning_rate": 3.7847698066187975e-06, |
|
"loss": 0.4237, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.6224066390041494, |
|
"grad_norm": 2.3438063234927817, |
|
"learning_rate": 3.749677600906489e-06, |
|
"loss": 0.421, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.6244813278008299, |
|
"grad_norm": 2.3587251515529504, |
|
"learning_rate": 3.7146509705662453e-06, |
|
"loss": 0.4275, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.6265560165975104, |
|
"grad_norm": 2.4384917069243683, |
|
"learning_rate": 3.6796917526317153e-06, |
|
"loss": 0.4146, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.6286307053941909, |
|
"grad_norm": 2.320300291694463, |
|
"learning_rate": 3.6448017806009804e-06, |
|
"loss": 0.4311, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.6307053941908713, |
|
"grad_norm": 2.6000582980659104, |
|
"learning_rate": 3.609982884340402e-06, |
|
"loss": 0.4223, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.6327800829875518, |
|
"grad_norm": 2.3040397099849894, |
|
"learning_rate": 3.575236889988646e-06, |
|
"loss": 0.417, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.6348547717842323, |
|
"grad_norm": 2.374493352143433, |
|
"learning_rate": 3.540565619860906e-06, |
|
"loss": 0.4144, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.6369294605809128, |
|
"grad_norm": 2.3145073297614, |
|
"learning_rate": 3.5059708923533354e-06, |
|
"loss": 0.3986, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.6390041493775933, |
|
"grad_norm": 2.4211124213820434, |
|
"learning_rate": 3.4714545218476727e-06, |
|
"loss": 0.4043, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.6410788381742739, |
|
"grad_norm": 2.4175056654596667, |
|
"learning_rate": 3.437018318616084e-06, |
|
"loss": 0.4174, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.6431535269709544, |
|
"grad_norm": 2.380521886079113, |
|
"learning_rate": 3.4026640887262196e-06, |
|
"loss": 0.411, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.6452282157676349, |
|
"grad_norm": 2.392698108885093, |
|
"learning_rate": 3.3683936339464957e-06, |
|
"loss": 0.4151, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.6473029045643154, |
|
"grad_norm": 2.4192008257860067, |
|
"learning_rate": 3.334208751651593e-06, |
|
"loss": 0.4084, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.6493775933609959, |
|
"grad_norm": 2.4849798192814685, |
|
"learning_rate": 3.300111234728191e-06, |
|
"loss": 0.408, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.6514522821576764, |
|
"grad_norm": 2.5234071594670526, |
|
"learning_rate": 3.2661028714809405e-06, |
|
"loss": 0.4152, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.6535269709543569, |
|
"grad_norm": 2.489346307863909, |
|
"learning_rate": 3.2321854455386657e-06, |
|
"loss": 0.4025, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.6556016597510373, |
|
"grad_norm": 2.46737093745338, |
|
"learning_rate": 3.198360735760827e-06, |
|
"loss": 0.3968, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.6576763485477178, |
|
"grad_norm": 2.4410239838789307, |
|
"learning_rate": 3.1646305161442183e-06, |
|
"loss": 0.4204, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.6597510373443983, |
|
"grad_norm": 2.4139081682249257, |
|
"learning_rate": 3.1309965557299303e-06, |
|
"loss": 0.4079, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.6618257261410788, |
|
"grad_norm": 2.3384724272454993, |
|
"learning_rate": 3.097460618510571e-06, |
|
"loss": 0.4045, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.6639004149377593, |
|
"grad_norm": 2.3896026198992084, |
|
"learning_rate": 3.064024463337747e-06, |
|
"loss": 0.4108, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.6659751037344398, |
|
"grad_norm": 2.329258899943861, |
|
"learning_rate": 3.0306898438298184e-06, |
|
"loss": 0.3989, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.6680497925311203, |
|
"grad_norm": 2.31692305602621, |
|
"learning_rate": 2.997458508279928e-06, |
|
"loss": 0.4015, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.6701244813278008, |
|
"grad_norm": 2.398883002631248, |
|
"learning_rate": 2.964332199564309e-06, |
|
"loss": 0.406, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.6721991701244814, |
|
"grad_norm": 2.3813147909469974, |
|
"learning_rate": 2.9313126550508762e-06, |
|
"loss": 0.3944, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.6742738589211619, |
|
"grad_norm": 2.4906895900662707, |
|
"learning_rate": 2.8984016065081073e-06, |
|
"loss": 0.3983, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.6763485477178424, |
|
"grad_norm": 2.3624233464918025, |
|
"learning_rate": 2.865600780014216e-06, |
|
"loss": 0.3982, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.6784232365145229, |
|
"grad_norm": 2.290981869465899, |
|
"learning_rate": 2.8329118958666236e-06, |
|
"loss": 0.3943, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.6804979253112033, |
|
"grad_norm": 2.5357078369662296, |
|
"learning_rate": 2.80033666849174e-06, |
|
"loss": 0.3915, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.6825726141078838, |
|
"grad_norm": 2.4515705414791404, |
|
"learning_rate": 2.7678768063550454e-06, |
|
"loss": 0.3888, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.6846473029045643, |
|
"grad_norm": 2.2840437643430684, |
|
"learning_rate": 2.735534011871479e-06, |
|
"loss": 0.3919, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.6867219917012448, |
|
"grad_norm": 2.341596589765756, |
|
"learning_rate": 2.7033099813161696e-06, |
|
"loss": 0.3877, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.6887966804979253, |
|
"grad_norm": 2.948965471363268, |
|
"learning_rate": 2.6712064047354515e-06, |
|
"loss": 0.3939, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.6908713692946058, |
|
"grad_norm": 2.3555666092707184, |
|
"learning_rate": 2.6392249658582454e-06, |
|
"loss": 0.4024, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.6929460580912863, |
|
"grad_norm": 2.27253281463788, |
|
"learning_rate": 2.607367342007738e-06, |
|
"loss": 0.3927, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.6950207468879668, |
|
"grad_norm": 2.3535906378617995, |
|
"learning_rate": 2.5756352040134193e-06, |
|
"loss": 0.389, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.6970954356846473, |
|
"grad_norm": 2.4680000171077365, |
|
"learning_rate": 2.5440302161234542e-06, |
|
"loss": 0.3931, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.6991701244813278, |
|
"grad_norm": 2.3968940072481875, |
|
"learning_rate": 2.5125540359173893e-06, |
|
"loss": 0.4014, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.7012448132780082, |
|
"grad_norm": 2.225325096703117, |
|
"learning_rate": 2.481208314219233e-06, |
|
"loss": 0.386, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.7033195020746889, |
|
"grad_norm": 2.2371315958217495, |
|
"learning_rate": 2.449994695010856e-06, |
|
"loss": 0.3936, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.7053941908713693, |
|
"grad_norm": 2.352909438284376, |
|
"learning_rate": 2.4189148153457875e-06, |
|
"loss": 0.3742, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.7074688796680498, |
|
"grad_norm": 2.324746103200888, |
|
"learning_rate": 2.387970305263349e-06, |
|
"loss": 0.3901, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.7095435684647303, |
|
"grad_norm": 2.42640792043005, |
|
"learning_rate": 2.3571627877031596e-06, |
|
"loss": 0.3846, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.7116182572614108, |
|
"grad_norm": 2.3023475330636116, |
|
"learning_rate": 2.326493878420028e-06, |
|
"loss": 0.3884, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.7136929460580913, |
|
"grad_norm": 2.335339104677645, |
|
"learning_rate": 2.295965185899205e-06, |
|
"loss": 0.3847, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.7157676348547718, |
|
"grad_norm": 2.333414853323239, |
|
"learning_rate": 2.265578311272021e-06, |
|
"loss": 0.3873, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.7178423236514523, |
|
"grad_norm": 2.454935296647717, |
|
"learning_rate": 2.2353348482319233e-06, |
|
"loss": 0.3839, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.7199170124481328, |
|
"grad_norm": 2.309156703996968, |
|
"learning_rate": 2.2052363829508776e-06, |
|
"loss": 0.3798, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.7219917012448133, |
|
"grad_norm": 2.2815947622504176, |
|
"learning_rate": 2.1752844939961926e-06, |
|
"loss": 0.3719, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.7240663900414938, |
|
"grad_norm": 2.4188650927693796, |
|
"learning_rate": 2.1454807522477128e-06, |
|
"loss": 0.3824, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.7261410788381742, |
|
"grad_norm": 2.3776289982984826, |
|
"learning_rate": 2.1158267208154497e-06, |
|
"loss": 0.3828, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.7282157676348547, |
|
"grad_norm": 2.358944220082303, |
|
"learning_rate": 2.0863239549575865e-06, |
|
"loss": 0.3811, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.7302904564315352, |
|
"grad_norm": 2.3471654715774295, |
|
"learning_rate": 2.0569740019989136e-06, |
|
"loss": 0.3987, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.7323651452282157, |
|
"grad_norm": 2.265178964057708, |
|
"learning_rate": 2.0277784012496865e-06, |
|
"loss": 0.3784, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.7344398340248963, |
|
"grad_norm": 2.4270537705788406, |
|
"learning_rate": 1.998738683924875e-06, |
|
"loss": 0.3683, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.7365145228215768, |
|
"grad_norm": 2.2934927093372366, |
|
"learning_rate": 1.9698563730638794e-06, |
|
"loss": 0.3816, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.7385892116182573, |
|
"grad_norm": 2.376416809425858, |
|
"learning_rate": 1.9411329834506286e-06, |
|
"loss": 0.3723, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.7406639004149378, |
|
"grad_norm": 2.3377136233793383, |
|
"learning_rate": 1.9125700215341476e-06, |
|
"loss": 0.376, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.7427385892116183, |
|
"grad_norm": 2.4551220885499596, |
|
"learning_rate": 1.8841689853495516e-06, |
|
"loss": 0.3663, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.7448132780082988, |
|
"grad_norm": 2.2729357349496984, |
|
"learning_rate": 1.8559313644394677e-06, |
|
"loss": 0.3765, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.7468879668049793, |
|
"grad_norm": 2.277303752934146, |
|
"learning_rate": 1.827858639775925e-06, |
|
"loss": 0.3761, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.7489626556016598, |
|
"grad_norm": 2.4340959703803606, |
|
"learning_rate": 1.7999522836826744e-06, |
|
"loss": 0.3729, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.7510373443983402, |
|
"grad_norm": 2.314473275008274, |
|
"learning_rate": 1.7722137597579698e-06, |
|
"loss": 0.3759, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.7531120331950207, |
|
"grad_norm": 2.379798639547255, |
|
"learning_rate": 1.744644522797817e-06, |
|
"loss": 0.3678, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.7551867219917012, |
|
"grad_norm": 2.2785597450829504, |
|
"learning_rate": 1.7172460187196588e-06, |
|
"loss": 0.3717, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.7572614107883817, |
|
"grad_norm": 2.405952696472988, |
|
"learning_rate": 1.6900196844865575e-06, |
|
"loss": 0.3713, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.7593360995850622, |
|
"grad_norm": 2.37888022354019, |
|
"learning_rate": 1.6629669480318166e-06, |
|
"loss": 0.3684, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.7614107883817427, |
|
"grad_norm": 2.2262459121984475, |
|
"learning_rate": 1.6360892281841007e-06, |
|
"loss": 0.3693, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.7634854771784232, |
|
"grad_norm": 2.455900450875957, |
|
"learning_rate": 1.609387934593019e-06, |
|
"loss": 0.3775, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.7655601659751037, |
|
"grad_norm": 2.562567328298204, |
|
"learning_rate": 1.5828644676551892e-06, |
|
"loss": 0.364, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.7676348547717843, |
|
"grad_norm": 2.1962225027572964, |
|
"learning_rate": 1.5565202184408e-06, |
|
"loss": 0.3627, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.7697095435684648, |
|
"grad_norm": 2.363565976616129, |
|
"learning_rate": 1.5303565686206452e-06, |
|
"loss": 0.3782, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.7717842323651453, |
|
"grad_norm": 2.3844840627822905, |
|
"learning_rate": 1.5043748903936672e-06, |
|
"loss": 0.3834, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.7738589211618258, |
|
"grad_norm": 2.4786424291101614, |
|
"learning_rate": 1.4785765464149836e-06, |
|
"loss": 0.3817, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.7759336099585062, |
|
"grad_norm": 2.189479317102701, |
|
"learning_rate": 1.4529628897244214e-06, |
|
"loss": 0.3674, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.7780082987551867, |
|
"grad_norm": 2.402183807863865, |
|
"learning_rate": 1.4275352636755613e-06, |
|
"loss": 0.3794, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.7800829875518672, |
|
"grad_norm": 2.2239150752027412, |
|
"learning_rate": 1.4022950018652703e-06, |
|
"loss": 0.3733, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.7821576763485477, |
|
"grad_norm": 2.3394068880707013, |
|
"learning_rate": 1.3772434280637737e-06, |
|
"loss": 0.3668, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.7842323651452282, |
|
"grad_norm": 2.397344916999858, |
|
"learning_rate": 1.3523818561452145e-06, |
|
"loss": 0.3578, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.7863070539419087, |
|
"grad_norm": 2.471162912805464, |
|
"learning_rate": 1.3277115900187515e-06, |
|
"loss": 0.3814, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.7883817427385892, |
|
"grad_norm": 2.39948483174554, |
|
"learning_rate": 1.3032339235601749e-06, |
|
"loss": 0.3733, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.7904564315352697, |
|
"grad_norm": 2.2661498432515557, |
|
"learning_rate": 1.2789501405440397e-06, |
|
"loss": 0.362, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.7925311203319502, |
|
"grad_norm": 2.319705838336872, |
|
"learning_rate": 1.254861514576348e-06, |
|
"loss": 0.3594, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.7946058091286307, |
|
"grad_norm": 2.3000112712614262, |
|
"learning_rate": 1.2309693090277392e-06, |
|
"loss": 0.3696, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.7966804979253111, |
|
"grad_norm": 2.464569747900305, |
|
"learning_rate": 1.207274776967235e-06, |
|
"loss": 0.3655, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.7987551867219918, |
|
"grad_norm": 2.3027575679624666, |
|
"learning_rate": 1.183779161096526e-06, |
|
"loss": 0.3626, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.8008298755186722, |
|
"grad_norm": 2.5859947274602595, |
|
"learning_rate": 1.1604836936847852e-06, |
|
"loss": 0.3604, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.8029045643153527, |
|
"grad_norm": 2.34463093033354, |
|
"learning_rate": 1.1373895965040522e-06, |
|
"loss": 0.3568, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.8049792531120332, |
|
"grad_norm": 2.3024287041379115, |
|
"learning_rate": 1.1144980807651413e-06, |
|
"loss": 0.3669, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.8070539419087137, |
|
"grad_norm": 2.4360264910170244, |
|
"learning_rate": 1.0918103470541297e-06, |
|
"loss": 0.363, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.8091286307053942, |
|
"grad_norm": 2.4832680691474285, |
|
"learning_rate": 1.069327585269383e-06, |
|
"loss": 0.3638, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.8112033195020747, |
|
"grad_norm": 2.321138551124523, |
|
"learning_rate": 1.047050974559149e-06, |
|
"loss": 0.3592, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.8132780082987552, |
|
"grad_norm": 2.5132329570801484, |
|
"learning_rate": 1.024981683259723e-06, |
|
"loss": 0.3574, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.8153526970954357, |
|
"grad_norm": 2.3161649116661844, |
|
"learning_rate": 1.0031208688341609e-06, |
|
"loss": 0.3554, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.8174273858921162, |
|
"grad_norm": 2.3983392594767112, |
|
"learning_rate": 9.814696778115834e-07, |
|
"loss": 0.3588, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.8195020746887967, |
|
"grad_norm": 2.3551041439235325, |
|
"learning_rate": 9.600292457270416e-07, |
|
"loss": 0.3546, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.8215767634854771, |
|
"grad_norm": 2.3212325454458576, |
|
"learning_rate": 9.388006970619557e-07, |
|
"loss": 0.3548, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.8236514522821576, |
|
"grad_norm": 2.211760368780073, |
|
"learning_rate": 9.177851451851511e-07, |
|
"loss": 0.3481, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.8257261410788381, |
|
"grad_norm": 2.4588035318455965, |
|
"learning_rate": 8.969836922944536e-07, |
|
"loss": 0.3559, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.8278008298755186, |
|
"grad_norm": 2.368676437808403, |
|
"learning_rate": 8.76397429358895e-07, |
|
"loss": 0.3541, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.8298755186721992, |
|
"grad_norm": 2.3589512875611276, |
|
"learning_rate": 8.560274360614829e-07, |
|
"loss": 0.3603, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.8319502074688797, |
|
"grad_norm": 2.356963775441445, |
|
"learning_rate": 8.358747807425827e-07, |
|
"loss": 0.3484, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.8340248962655602, |
|
"grad_norm": 2.3220690567008133, |
|
"learning_rate": 8.159405203438875e-07, |
|
"loss": 0.3464, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.8360995850622407, |
|
"grad_norm": 2.359775719598123, |
|
"learning_rate": 7.962257003529777e-07, |
|
"loss": 0.3548, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.8381742738589212, |
|
"grad_norm": 2.4505802422663274, |
|
"learning_rate": 7.767313547484979e-07, |
|
"loss": 0.3701, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.8402489626556017, |
|
"grad_norm": 2.545107107586247, |
|
"learning_rate": 7.574585059459171e-07, |
|
"loss": 0.3673, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.8423236514522822, |
|
"grad_norm": 2.3813325946269353, |
|
"learning_rate": 7.384081647439184e-07, |
|
"loss": 0.3636, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.8443983402489627, |
|
"grad_norm": 2.163299487853864, |
|
"learning_rate": 7.195813302713761e-07, |
|
"loss": 0.343, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.8464730290456431, |
|
"grad_norm": 2.2370604471344766, |
|
"learning_rate": 7.009789899349567e-07, |
|
"loss": 0.3529, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.8485477178423236, |
|
"grad_norm": 2.24681844246371, |
|
"learning_rate": 6.826021193673416e-07, |
|
"loss": 0.3568, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.8506224066390041, |
|
"grad_norm": 2.3636465203484716, |
|
"learning_rate": 6.644516823760439e-07, |
|
"loss": 0.3377, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.8526970954356846, |
|
"grad_norm": 2.4216229448578175, |
|
"learning_rate": 6.465286308928676e-07, |
|
"loss": 0.3498, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.8547717842323651, |
|
"grad_norm": 2.282038633513587, |
|
"learning_rate": 6.28833904923985e-07, |
|
"loss": 0.3493, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.8568464730290456, |
|
"grad_norm": 2.2993596315955913, |
|
"learning_rate": 6.113684325006286e-07, |
|
"loss": 0.3344, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.8589211618257261, |
|
"grad_norm": 2.2449126132746224, |
|
"learning_rate": 5.941331296304254e-07, |
|
"loss": 0.3512, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.8609958506224067, |
|
"grad_norm": 2.3104826448333284, |
|
"learning_rate": 5.771289002493502e-07, |
|
"loss": 0.344, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.8630705394190872, |
|
"grad_norm": 2.398245230670162, |
|
"learning_rate": 5.603566361743229e-07, |
|
"loss": 0.3536, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.8651452282157677, |
|
"grad_norm": 2.49179100334087, |
|
"learning_rate": 5.438172170564288e-07, |
|
"loss": 0.3518, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.8672199170124482, |
|
"grad_norm": 2.4675507681520137, |
|
"learning_rate": 5.275115103347884e-07, |
|
"loss": 0.3456, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.8692946058091287, |
|
"grad_norm": 2.374146536522821, |
|
"learning_rate": 5.114403711910631e-07, |
|
"loss": 0.3538, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.8713692946058091, |
|
"grad_norm": 2.488354926145778, |
|
"learning_rate": 4.956046425045991e-07, |
|
"loss": 0.3434, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.8734439834024896, |
|
"grad_norm": 2.2696546770681936, |
|
"learning_rate": 4.80005154808228e-07, |
|
"loss": 0.3535, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.8755186721991701, |
|
"grad_norm": 2.3833842319160534, |
|
"learning_rate": 4.646427262447034e-07, |
|
"loss": 0.3536, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.8775933609958506, |
|
"grad_norm": 2.2510965407566905, |
|
"learning_rate": 4.49518162523791e-07, |
|
"loss": 0.3352, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.8796680497925311, |
|
"grad_norm": 2.417330217247304, |
|
"learning_rate": 4.34632256880016e-07, |
|
"loss": 0.3468, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.8817427385892116, |
|
"grad_norm": 2.4035451499339953, |
|
"learning_rate": 4.1998579003105553e-07, |
|
"loss": 0.3394, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.8838174273858921, |
|
"grad_norm": 2.2855129915915957, |
|
"learning_rate": 4.0557953013679873e-07, |
|
"loss": 0.3453, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.8858921161825726, |
|
"grad_norm": 2.413842190805828, |
|
"learning_rate": 3.914142327590531e-07, |
|
"loss": 0.3555, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.8879668049792531, |
|
"grad_norm": 2.4223858719206413, |
|
"learning_rate": 3.7749064082191976e-07, |
|
"loss": 0.3393, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.8900414937759336, |
|
"grad_norm": 2.4330635286410427, |
|
"learning_rate": 3.6380948457283293e-07, |
|
"loss": 0.348, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.8921161825726142, |
|
"grad_norm": 2.360637019485879, |
|
"learning_rate": 3.5037148154425494e-07, |
|
"loss": 0.3415, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.8941908713692946, |
|
"grad_norm": 2.2772502464658877, |
|
"learning_rate": 3.3717733651604967e-07, |
|
"loss": 0.351, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.8962655601659751, |
|
"grad_norm": 2.1584946641381193, |
|
"learning_rate": 3.242277414785128e-07, |
|
"loss": 0.34, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.8983402489626556, |
|
"grad_norm": 2.302095447475987, |
|
"learning_rate": 3.1152337559608725e-07, |
|
"loss": 0.3387, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.9004149377593361, |
|
"grad_norm": 2.380841330938754, |
|
"learning_rate": 2.990649051717348e-07, |
|
"loss": 0.352, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.9024896265560166, |
|
"grad_norm": 2.418959372738933, |
|
"learning_rate": 2.868529836119954e-07, |
|
"loss": 0.3457, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.9045643153526971, |
|
"grad_norm": 2.268185455009799, |
|
"learning_rate": 2.7488825139272037e-07, |
|
"loss": 0.3388, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.9066390041493776, |
|
"grad_norm": 2.58650821667144, |
|
"learning_rate": 2.631713360254734e-07, |
|
"loss": 0.3437, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.9087136929460581, |
|
"grad_norm": 2.361151198560315, |
|
"learning_rate": 2.5170285202462895e-07, |
|
"loss": 0.3492, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.9107883817427386, |
|
"grad_norm": 2.5092489039330106, |
|
"learning_rate": 2.4048340087513665e-07, |
|
"loss": 0.3442, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.9128630705394191, |
|
"grad_norm": 2.6062539533561107, |
|
"learning_rate": 2.295135710009755e-07, |
|
"loss": 0.3328, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.9149377593360996, |
|
"grad_norm": 2.558993432851139, |
|
"learning_rate": 2.1879393773429903e-07, |
|
"loss": 0.3555, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.91701244813278, |
|
"grad_norm": 2.3603294500033822, |
|
"learning_rate": 2.0832506328525237e-07, |
|
"loss": 0.3413, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.9190871369294605, |
|
"grad_norm": 2.43359253209144, |
|
"learning_rate": 1.9810749671249353e-07, |
|
"loss": 0.3488, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.921161825726141, |
|
"grad_norm": 2.4744935433164623, |
|
"learning_rate": 1.8814177389439304e-07, |
|
"loss": 0.3352, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.9232365145228216, |
|
"grad_norm": 2.438754761054924, |
|
"learning_rate": 1.784284175009282e-07, |
|
"loss": 0.3428, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.9253112033195021, |
|
"grad_norm": 2.545159189265119, |
|
"learning_rate": 1.68967936966275e-07, |
|
"loss": 0.3442, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.9273858921161826, |
|
"grad_norm": 2.324326221675781, |
|
"learning_rate": 1.5976082846208552e-07, |
|
"loss": 0.3367, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.9294605809128631, |
|
"grad_norm": 2.3175819295965567, |
|
"learning_rate": 1.5080757487146823e-07, |
|
"loss": 0.3408, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.9315352697095436, |
|
"grad_norm": 2.479255280298891, |
|
"learning_rate": 1.4210864576365891e-07, |
|
"loss": 0.3421, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.9336099585062241, |
|
"grad_norm": 2.630435075469239, |
|
"learning_rate": 1.3366449736939845e-07, |
|
"loss": 0.3474, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.9356846473029046, |
|
"grad_norm": 2.6920940284166393, |
|
"learning_rate": 1.254755725570006e-07, |
|
"loss": 0.3481, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.9377593360995851, |
|
"grad_norm": 2.3917785598046684, |
|
"learning_rate": 1.1754230080912588e-07, |
|
"loss": 0.3452, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.9398340248962656, |
|
"grad_norm": 2.396762744912798, |
|
"learning_rate": 1.0986509820025915e-07, |
|
"loss": 0.3422, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.941908713692946, |
|
"grad_norm": 2.4090495372842993, |
|
"learning_rate": 1.0244436737488373e-07, |
|
"loss": 0.3326, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.9439834024896265, |
|
"grad_norm": 2.3974245359764206, |
|
"learning_rate": 9.528049752636714e-08, |
|
"loss": 0.3464, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.946058091286307, |
|
"grad_norm": 2.4287767854527242, |
|
"learning_rate": 8.837386437654861e-08, |
|
"loss": 0.3416, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.9481327800829875, |
|
"grad_norm": 2.436296796369512, |
|
"learning_rate": 8.172483015603139e-08, |
|
"loss": 0.3424, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.950207468879668, |
|
"grad_norm": 2.4024100510658344, |
|
"learning_rate": 7.533374358518974e-08, |
|
"loss": 0.3493, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.9522821576763485, |
|
"grad_norm": 2.5019092758372463, |
|
"learning_rate": 6.920093985587351e-08, |
|
"loss": 0.3495, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.9543568464730291, |
|
"grad_norm": 2.3464048207997568, |
|
"learning_rate": 6.332674061383492e-08, |
|
"loss": 0.3314, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.9564315352697096, |
|
"grad_norm": 2.3715524232082816, |
|
"learning_rate": 5.7711453941852736e-08, |
|
"loss": 0.3368, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.9585062240663901, |
|
"grad_norm": 2.3246413982828487, |
|
"learning_rate": 5.23553743435784e-08, |
|
"loss": 0.3353, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.9605809128630706, |
|
"grad_norm": 2.4061240655823926, |
|
"learning_rate": 4.7258782728092366e-08, |
|
"loss": 0.3371, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.9626556016597511, |
|
"grad_norm": 2.4127269640041438, |
|
"learning_rate": 4.2421946395164174e-08, |
|
"loss": 0.3423, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.9647302904564315, |
|
"grad_norm": 2.474699085905208, |
|
"learning_rate": 3.784511902124033e-08, |
|
"loss": 0.3459, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.966804979253112, |
|
"grad_norm": 2.421582088295479, |
|
"learning_rate": 3.352854064613553e-08, |
|
"loss": 0.3453, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.9688796680497925, |
|
"grad_norm": 2.3553474162917194, |
|
"learning_rate": 2.9472437660446605e-08, |
|
"loss": 0.3425, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.970954356846473, |
|
"grad_norm": 2.3882039614225965, |
|
"learning_rate": 2.56770227936759e-08, |
|
"loss": 0.3382, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.9730290456431535, |
|
"grad_norm": 2.2901989852760574, |
|
"learning_rate": 2.2142495103075757e-08, |
|
"loss": 0.3413, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.975103734439834, |
|
"grad_norm": 2.4622979650301944, |
|
"learning_rate": 1.8869039963210766e-08, |
|
"loss": 0.3426, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.9771784232365145, |
|
"grad_norm": 2.470569893405746, |
|
"learning_rate": 1.585682905623054e-08, |
|
"loss": 0.3485, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.979253112033195, |
|
"grad_norm": 2.3429704203873727, |
|
"learning_rate": 1.3106020362869675e-08, |
|
"loss": 0.3401, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.9813278008298755, |
|
"grad_norm": 2.3091692082166575, |
|
"learning_rate": 1.0616758154161633e-08, |
|
"loss": 0.3482, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.983402489626556, |
|
"grad_norm": 2.283581606108484, |
|
"learning_rate": 8.38917298386921e-09, |
|
"loss": 0.3412, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.9854771784232366, |
|
"grad_norm": 2.4020666228611174, |
|
"learning_rate": 6.423381681641694e-09, |
|
"loss": 0.3505, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.9875518672199171, |
|
"grad_norm": 2.3721276729518634, |
|
"learning_rate": 4.719487346884211e-09, |
|
"loss": 0.3479, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.9896265560165975, |
|
"grad_norm": 2.505461029665658, |
|
"learning_rate": 3.277579343352599e-09, |
|
"loss": 0.3446, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.991701244813278, |
|
"grad_norm": 2.477325943722518, |
|
"learning_rate": 2.097733294464943e-09, |
|
"loss": 0.3427, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.9937759336099585, |
|
"grad_norm": 2.5425962242114286, |
|
"learning_rate": 1.1800110793358521e-09, |
|
"loss": 0.3446, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.995850622406639, |
|
"grad_norm": 2.278148874258288, |
|
"learning_rate": 5.244608295323916e-10, |
|
"loss": 0.3313, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.9979253112033195, |
|
"grad_norm": 2.469289521593084, |
|
"learning_rate": 1.3111692654776963e-10, |
|
"loss": 0.3323, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 2.5163002141753514, |
|
"learning_rate": 0.0, |
|
"loss": 0.3541, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_runtime": 3.5475, |
|
"eval_samples_per_second": 2.819, |
|
"eval_steps_per_second": 0.846, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 2410, |
|
"total_flos": 252302485094400.0, |
|
"train_loss": 0.0, |
|
"train_runtime": 0.0091, |
|
"train_samples_per_second": 4252334.509, |
|
"train_steps_per_second": 265770.907 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2409, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 252302485094400.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|