|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 2414, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00041425020712510354, |
|
"grad_norm": 24.199668764830136, |
|
"learning_rate": 4.132231404958678e-08, |
|
"loss": 1.3991, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002071251035625518, |
|
"grad_norm": 23.414571187298456, |
|
"learning_rate": 2.066115702479339e-07, |
|
"loss": 1.4112, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.004142502071251036, |
|
"grad_norm": 14.988102359744419, |
|
"learning_rate": 4.132231404958678e-07, |
|
"loss": 1.3662, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.006213753106876553, |
|
"grad_norm": 8.653198188938534, |
|
"learning_rate": 6.198347107438018e-07, |
|
"loss": 1.2641, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.008285004142502071, |
|
"grad_norm": 10.616310133905728, |
|
"learning_rate": 8.264462809917356e-07, |
|
"loss": 1.1368, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.010356255178127589, |
|
"grad_norm": 4.668322782816374, |
|
"learning_rate": 1.0330578512396695e-06, |
|
"loss": 1.0379, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.012427506213753107, |
|
"grad_norm": 3.5182741608551535, |
|
"learning_rate": 1.2396694214876035e-06, |
|
"loss": 0.9937, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.014498757249378625, |
|
"grad_norm": 3.3411660569699504, |
|
"learning_rate": 1.4462809917355372e-06, |
|
"loss": 0.9572, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.016570008285004142, |
|
"grad_norm": 3.037596563067607, |
|
"learning_rate": 1.6528925619834712e-06, |
|
"loss": 0.9289, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.018641259320629662, |
|
"grad_norm": 3.2528069002854374, |
|
"learning_rate": 1.859504132231405e-06, |
|
"loss": 0.9214, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.020712510356255178, |
|
"grad_norm": 3.2107014690984803, |
|
"learning_rate": 2.066115702479339e-06, |
|
"loss": 0.9086, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.022783761391880698, |
|
"grad_norm": 2.9859094433454554, |
|
"learning_rate": 2.2727272727272728e-06, |
|
"loss": 0.8991, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.024855012427506214, |
|
"grad_norm": 3.0590584148615627, |
|
"learning_rate": 2.479338842975207e-06, |
|
"loss": 0.8916, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.026926263463131733, |
|
"grad_norm": 2.9914820848818446, |
|
"learning_rate": 2.6859504132231405e-06, |
|
"loss": 0.8785, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.02899751449875725, |
|
"grad_norm": 2.9775144313765445, |
|
"learning_rate": 2.8925619834710743e-06, |
|
"loss": 0.8897, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03106876553438277, |
|
"grad_norm": 3.128569537433572, |
|
"learning_rate": 3.0991735537190086e-06, |
|
"loss": 0.8834, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.033140016570008285, |
|
"grad_norm": 3.1281090024503757, |
|
"learning_rate": 3.3057851239669424e-06, |
|
"loss": 0.8651, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.035211267605633804, |
|
"grad_norm": 3.0548507580726625, |
|
"learning_rate": 3.5123966942148763e-06, |
|
"loss": 0.8737, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.037282518641259324, |
|
"grad_norm": 3.088551853774246, |
|
"learning_rate": 3.71900826446281e-06, |
|
"loss": 0.8623, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.03935376967688484, |
|
"grad_norm": 3.117570527250618, |
|
"learning_rate": 3.925619834710744e-06, |
|
"loss": 0.838, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.041425020712510356, |
|
"grad_norm": 3.248544738727649, |
|
"learning_rate": 4.132231404958678e-06, |
|
"loss": 0.8675, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.043496271748135876, |
|
"grad_norm": 3.096171447698008, |
|
"learning_rate": 4.338842975206612e-06, |
|
"loss": 0.8394, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.045567522783761395, |
|
"grad_norm": 3.233248587377124, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 0.857, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04763877381938691, |
|
"grad_norm": 3.6001636024321293, |
|
"learning_rate": 4.75206611570248e-06, |
|
"loss": 0.8321, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.04971002485501243, |
|
"grad_norm": 2.9956895468112084, |
|
"learning_rate": 4.958677685950414e-06, |
|
"loss": 0.833, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.05178127589063795, |
|
"grad_norm": 3.2710428266169953, |
|
"learning_rate": 5.165289256198347e-06, |
|
"loss": 0.847, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.053852526926263466, |
|
"grad_norm": 3.249727950554207, |
|
"learning_rate": 5.371900826446281e-06, |
|
"loss": 0.8491, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.05592377796188898, |
|
"grad_norm": 3.638097729325915, |
|
"learning_rate": 5.578512396694216e-06, |
|
"loss": 0.8218, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.0579950289975145, |
|
"grad_norm": 3.136515122305162, |
|
"learning_rate": 5.785123966942149e-06, |
|
"loss": 0.8367, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.06006628003314002, |
|
"grad_norm": 2.992975297054502, |
|
"learning_rate": 5.991735537190083e-06, |
|
"loss": 0.8331, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.06213753106876554, |
|
"grad_norm": 3.1773736798479653, |
|
"learning_rate": 6.198347107438017e-06, |
|
"loss": 0.8159, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.06420878210439106, |
|
"grad_norm": 3.2512013569633, |
|
"learning_rate": 6.404958677685951e-06, |
|
"loss": 0.8261, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.06628003314001657, |
|
"grad_norm": 3.0823463887606852, |
|
"learning_rate": 6.611570247933885e-06, |
|
"loss": 0.8376, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.06835128417564208, |
|
"grad_norm": 3.1208323742016897, |
|
"learning_rate": 6.818181818181818e-06, |
|
"loss": 0.8275, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.07042253521126761, |
|
"grad_norm": 3.0745780302802332, |
|
"learning_rate": 7.0247933884297525e-06, |
|
"loss": 0.8249, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.07249378624689312, |
|
"grad_norm": 3.119480709134969, |
|
"learning_rate": 7.231404958677687e-06, |
|
"loss": 0.8042, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.07456503728251865, |
|
"grad_norm": 3.3248978246402663, |
|
"learning_rate": 7.43801652892562e-06, |
|
"loss": 0.8284, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.07663628831814416, |
|
"grad_norm": 3.101088437755532, |
|
"learning_rate": 7.644628099173555e-06, |
|
"loss": 0.8158, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.07870753935376967, |
|
"grad_norm": 3.3761252638628707, |
|
"learning_rate": 7.851239669421489e-06, |
|
"loss": 0.8082, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.0807787903893952, |
|
"grad_norm": 3.211138010347251, |
|
"learning_rate": 8.057851239669421e-06, |
|
"loss": 0.8066, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.08285004142502071, |
|
"grad_norm": 3.390864723290171, |
|
"learning_rate": 8.264462809917356e-06, |
|
"loss": 0.8071, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.08492129246064622, |
|
"grad_norm": 2.9884296341537775, |
|
"learning_rate": 8.47107438016529e-06, |
|
"loss": 0.8173, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.08699254349627175, |
|
"grad_norm": 2.9929265463289774, |
|
"learning_rate": 8.677685950413224e-06, |
|
"loss": 0.8139, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.08906379453189726, |
|
"grad_norm": 3.178365772310986, |
|
"learning_rate": 8.884297520661158e-06, |
|
"loss": 0.8077, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.09113504556752279, |
|
"grad_norm": 3.0990381736081556, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 0.7962, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.0932062966031483, |
|
"grad_norm": 3.3389255178168322, |
|
"learning_rate": 9.297520661157025e-06, |
|
"loss": 0.7985, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.09527754763877382, |
|
"grad_norm": 3.3752236468584456, |
|
"learning_rate": 9.50413223140496e-06, |
|
"loss": 0.796, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.09734879867439934, |
|
"grad_norm": 3.0456999521377877, |
|
"learning_rate": 9.710743801652894e-06, |
|
"loss": 0.7983, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.09942004971002485, |
|
"grad_norm": 3.2120044737399187, |
|
"learning_rate": 9.917355371900828e-06, |
|
"loss": 0.8055, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.10149130074565037, |
|
"grad_norm": 3.132342089203479, |
|
"learning_rate": 9.999952928077044e-06, |
|
"loss": 0.8021, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.1035625517812759, |
|
"grad_norm": 2.9894402985924766, |
|
"learning_rate": 9.999665269535307e-06, |
|
"loss": 0.8013, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.1056338028169014, |
|
"grad_norm": 3.046903074957559, |
|
"learning_rate": 9.99911611854702e-06, |
|
"loss": 0.7979, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.10770505385252693, |
|
"grad_norm": 2.8665185067493626, |
|
"learning_rate": 9.998305503833872e-06, |
|
"loss": 0.8073, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.10977630488815245, |
|
"grad_norm": 3.2409413642928278, |
|
"learning_rate": 9.997233467792626e-06, |
|
"loss": 0.7976, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.11184755592377796, |
|
"grad_norm": 2.944077687828054, |
|
"learning_rate": 9.995900066492902e-06, |
|
"loss": 0.8016, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.11391880695940348, |
|
"grad_norm": 2.9912116980156145, |
|
"learning_rate": 9.994305369674242e-06, |
|
"loss": 0.7853, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.115990057995029, |
|
"grad_norm": 2.892932100424063, |
|
"learning_rate": 9.992449460742464e-06, |
|
"loss": 0.8046, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.11806130903065451, |
|
"grad_norm": 3.055501054823764, |
|
"learning_rate": 9.9903324367653e-06, |
|
"loss": 0.7794, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.12013256006628004, |
|
"grad_norm": 2.7659846238569052, |
|
"learning_rate": 9.98795440846732e-06, |
|
"loss": 0.7814, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.12220381110190555, |
|
"grad_norm": 3.0121040906813126, |
|
"learning_rate": 9.985315500224135e-06, |
|
"loss": 0.7809, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.12427506213753108, |
|
"grad_norm": 3.01982583757692, |
|
"learning_rate": 9.982415850055902e-06, |
|
"loss": 0.781, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.1263463131731566, |
|
"grad_norm": 2.716394804747616, |
|
"learning_rate": 9.979255609620095e-06, |
|
"loss": 0.7734, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.12841756420878211, |
|
"grad_norm": 2.690802383393494, |
|
"learning_rate": 9.975834944203581e-06, |
|
"loss": 0.7667, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.1304888152444076, |
|
"grad_norm": 2.9488089043416124, |
|
"learning_rate": 9.972154032713973e-06, |
|
"loss": 0.7805, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.13256006628003314, |
|
"grad_norm": 2.719266979190075, |
|
"learning_rate": 9.968213067670265e-06, |
|
"loss": 0.7763, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.13463131731565867, |
|
"grad_norm": 2.812329679333236, |
|
"learning_rate": 9.964012255192776e-06, |
|
"loss": 0.7533, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.13670256835128416, |
|
"grad_norm": 2.939179450726379, |
|
"learning_rate": 9.959551814992364e-06, |
|
"loss": 0.7538, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.1387738193869097, |
|
"grad_norm": 3.0343927565408566, |
|
"learning_rate": 9.954831980358928e-06, |
|
"loss": 0.7761, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.14084507042253522, |
|
"grad_norm": 3.051369274408743, |
|
"learning_rate": 9.949852998149217e-06, |
|
"loss": 0.7623, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.14291632145816072, |
|
"grad_norm": 2.8926023239726666, |
|
"learning_rate": 9.944615128773911e-06, |
|
"loss": 0.7449, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.14498757249378624, |
|
"grad_norm": 3.0274876780182725, |
|
"learning_rate": 9.939118646184007e-06, |
|
"loss": 0.7564, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.14705882352941177, |
|
"grad_norm": 3.030086237395326, |
|
"learning_rate": 9.933363837856485e-06, |
|
"loss": 0.7636, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.1491300745650373, |
|
"grad_norm": 2.7216718925144443, |
|
"learning_rate": 9.927351004779275e-06, |
|
"loss": 0.7523, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.1512013256006628, |
|
"grad_norm": 2.8963011471510423, |
|
"learning_rate": 9.921080461435522e-06, |
|
"loss": 0.747, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.15327257663628832, |
|
"grad_norm": 2.908273407418608, |
|
"learning_rate": 9.914552535787122e-06, |
|
"loss": 0.7325, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.15534382767191385, |
|
"grad_norm": 2.8676437077981722, |
|
"learning_rate": 9.90776756925758e-06, |
|
"loss": 0.7349, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.15741507870753935, |
|
"grad_norm": 2.8556939586506687, |
|
"learning_rate": 9.900725916714157e-06, |
|
"loss": 0.7524, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.15948632974316487, |
|
"grad_norm": 2.87186116366176, |
|
"learning_rate": 9.893427946449297e-06, |
|
"loss": 0.7214, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.1615575807787904, |
|
"grad_norm": 2.8757200745024485, |
|
"learning_rate": 9.885874040161373e-06, |
|
"loss": 0.7326, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.1636288318144159, |
|
"grad_norm": 3.215961320189587, |
|
"learning_rate": 9.878064592934723e-06, |
|
"loss": 0.7227, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.16570008285004142, |
|
"grad_norm": 2.9059437980483693, |
|
"learning_rate": 9.87000001321898e-06, |
|
"loss": 0.7509, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.16777133388566695, |
|
"grad_norm": 2.685626883478994, |
|
"learning_rate": 9.86168072280772e-06, |
|
"loss": 0.7223, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.16984258492129245, |
|
"grad_norm": 3.033273475512452, |
|
"learning_rate": 9.853107156816393e-06, |
|
"loss": 0.7385, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.17191383595691798, |
|
"grad_norm": 2.7784947677053955, |
|
"learning_rate": 9.844279763659566e-06, |
|
"loss": 0.7237, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.1739850869925435, |
|
"grad_norm": 2.9117670580445543, |
|
"learning_rate": 9.835199005027477e-06, |
|
"loss": 0.7161, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.176056338028169, |
|
"grad_norm": 2.8843325957862764, |
|
"learning_rate": 9.825865355861878e-06, |
|
"loss": 0.7256, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.17812758906379453, |
|
"grad_norm": 2.856399843872642, |
|
"learning_rate": 9.816279304331202e-06, |
|
"loss": 0.7142, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.18019884009942005, |
|
"grad_norm": 2.9556721265181163, |
|
"learning_rate": 9.806441351805025e-06, |
|
"loss": 0.7306, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.18227009113504558, |
|
"grad_norm": 2.9810648069102172, |
|
"learning_rate": 9.79635201282785e-06, |
|
"loss": 0.705, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.18434134217067108, |
|
"grad_norm": 2.650650945302262, |
|
"learning_rate": 9.786011815092193e-06, |
|
"loss": 0.7026, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.1864125932062966, |
|
"grad_norm": 2.7614731795144727, |
|
"learning_rate": 9.775421299410977e-06, |
|
"loss": 0.6993, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.18848384424192213, |
|
"grad_norm": 2.8508808311574487, |
|
"learning_rate": 9.764581019689255e-06, |
|
"loss": 0.707, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.19055509527754763, |
|
"grad_norm": 3.1150961713118646, |
|
"learning_rate": 9.753491542895237e-06, |
|
"loss": 0.7186, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.19262634631317316, |
|
"grad_norm": 3.0323106493041325, |
|
"learning_rate": 9.742153449030639e-06, |
|
"loss": 0.6893, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.19469759734879868, |
|
"grad_norm": 3.0499193466413654, |
|
"learning_rate": 9.730567331100333e-06, |
|
"loss": 0.7106, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.19676884838442418, |
|
"grad_norm": 2.8549221082922, |
|
"learning_rate": 9.71873379508136e-06, |
|
"loss": 0.6801, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.1988400994200497, |
|
"grad_norm": 2.8978651489251703, |
|
"learning_rate": 9.706653459891207e-06, |
|
"loss": 0.6906, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.20091135045567524, |
|
"grad_norm": 2.7131289500779237, |
|
"learning_rate": 9.694326957355452e-06, |
|
"loss": 0.6793, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.20298260149130073, |
|
"grad_norm": 2.7450048929619033, |
|
"learning_rate": 9.681754932174719e-06, |
|
"loss": 0.6987, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.20505385252692626, |
|
"grad_norm": 2.752895531078258, |
|
"learning_rate": 9.668938041890952e-06, |
|
"loss": 0.693, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.2071251035625518, |
|
"grad_norm": 2.808137119286013, |
|
"learning_rate": 9.655876956853025e-06, |
|
"loss": 0.681, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.2091963545981773, |
|
"grad_norm": 2.724476444787799, |
|
"learning_rate": 9.64257236018169e-06, |
|
"loss": 0.6716, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.2112676056338028, |
|
"grad_norm": 2.748867424602178, |
|
"learning_rate": 9.629024947733836e-06, |
|
"loss": 0.6944, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.21333885666942834, |
|
"grad_norm": 2.9490226550436676, |
|
"learning_rate": 9.615235428066106e-06, |
|
"loss": 0.6844, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.21541010770505387, |
|
"grad_norm": 2.6625828983237634, |
|
"learning_rate": 9.601204522397826e-06, |
|
"loss": 0.6876, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.21748135874067936, |
|
"grad_norm": 2.8580713323817672, |
|
"learning_rate": 9.586932964573298e-06, |
|
"loss": 0.6675, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.2195526097763049, |
|
"grad_norm": 2.65085890829761, |
|
"learning_rate": 9.572421501023403e-06, |
|
"loss": 0.6871, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.22162386081193042, |
|
"grad_norm": 2.8073591225727066, |
|
"learning_rate": 9.557670890726576e-06, |
|
"loss": 0.6806, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.22369511184755592, |
|
"grad_norm": 2.890518621456693, |
|
"learning_rate": 9.5426819051691e-06, |
|
"loss": 0.6842, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.22576636288318144, |
|
"grad_norm": 3.037365740542274, |
|
"learning_rate": 9.527455328304756e-06, |
|
"loss": 0.6706, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.22783761391880697, |
|
"grad_norm": 3.007466890807781, |
|
"learning_rate": 9.511991956513828e-06, |
|
"loss": 0.6768, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.22990886495443247, |
|
"grad_norm": 2.991724717996849, |
|
"learning_rate": 9.496292598561445e-06, |
|
"loss": 0.6399, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.231980115990058, |
|
"grad_norm": 2.6643291004806144, |
|
"learning_rate": 9.480358075555278e-06, |
|
"loss": 0.6745, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.23405136702568352, |
|
"grad_norm": 2.7494300595409076, |
|
"learning_rate": 9.464189220902603e-06, |
|
"loss": 0.6659, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.23612261806130902, |
|
"grad_norm": 2.778050795482066, |
|
"learning_rate": 9.447786880266706e-06, |
|
"loss": 0.6682, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.23819386909693455, |
|
"grad_norm": 2.8808954872434107, |
|
"learning_rate": 9.431151911522656e-06, |
|
"loss": 0.6603, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.24026512013256007, |
|
"grad_norm": 2.7631310193416008, |
|
"learning_rate": 9.414285184712432e-06, |
|
"loss": 0.6479, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.24233637116818557, |
|
"grad_norm": 2.7504091304269505, |
|
"learning_rate": 9.397187581999424e-06, |
|
"loss": 0.656, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.2444076222038111, |
|
"grad_norm": 2.7704909990609345, |
|
"learning_rate": 9.37985999762229e-06, |
|
"loss": 0.6494, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.24647887323943662, |
|
"grad_norm": 2.7880341771333086, |
|
"learning_rate": 9.362303337848188e-06, |
|
"loss": 0.6498, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.24855012427506215, |
|
"grad_norm": 2.7372277502845064, |
|
"learning_rate": 9.344518520925377e-06, |
|
"loss": 0.635, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.2506213753106877, |
|
"grad_norm": 2.700220180234652, |
|
"learning_rate": 9.326506477035179e-06, |
|
"loss": 0.6205, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.2526926263463132, |
|
"grad_norm": 2.9446333047323714, |
|
"learning_rate": 9.308268148243355e-06, |
|
"loss": 0.6377, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.2547638773819387, |
|
"grad_norm": 2.851475449493436, |
|
"learning_rate": 9.289804488450805e-06, |
|
"loss": 0.6395, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.25683512841756423, |
|
"grad_norm": 2.868033711564291, |
|
"learning_rate": 9.271116463343692e-06, |
|
"loss": 0.6421, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.25890637945318973, |
|
"grad_norm": 2.6514436144580307, |
|
"learning_rate": 9.25220505034293e-06, |
|
"loss": 0.6372, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.2609776304888152, |
|
"grad_norm": 2.9338066764643598, |
|
"learning_rate": 9.23307123855307e-06, |
|
"loss": 0.636, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.2630488815244408, |
|
"grad_norm": 3.023298105516956, |
|
"learning_rate": 9.213716028710558e-06, |
|
"loss": 0.6103, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.2651201325600663, |
|
"grad_norm": 2.871190920425405, |
|
"learning_rate": 9.194140433131397e-06, |
|
"loss": 0.6159, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.2671913835956918, |
|
"grad_norm": 2.7631786024911187, |
|
"learning_rate": 9.174345475658208e-06, |
|
"loss": 0.6228, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.26926263463131733, |
|
"grad_norm": 2.998454912972155, |
|
"learning_rate": 9.154332191606671e-06, |
|
"loss": 0.6335, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.27133388566694283, |
|
"grad_norm": 2.803386942098811, |
|
"learning_rate": 9.134101627711384e-06, |
|
"loss": 0.6119, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.27340513670256833, |
|
"grad_norm": 2.6219329149002277, |
|
"learning_rate": 9.113654842071114e-06, |
|
"loss": 0.6171, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.2754763877381939, |
|
"grad_norm": 2.61515633927928, |
|
"learning_rate": 9.092992904093451e-06, |
|
"loss": 0.6165, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.2775476387738194, |
|
"grad_norm": 2.8142275849395473, |
|
"learning_rate": 9.072116894438885e-06, |
|
"loss": 0.6091, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.2796188898094449, |
|
"grad_norm": 2.8036261300008856, |
|
"learning_rate": 9.051027904964279e-06, |
|
"loss": 0.6266, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.28169014084507044, |
|
"grad_norm": 2.5728545238650122, |
|
"learning_rate": 9.029727038665765e-06, |
|
"loss": 0.625, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.28376139188069593, |
|
"grad_norm": 2.911382813259161, |
|
"learning_rate": 9.008215409621053e-06, |
|
"loss": 0.6023, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.28583264291632143, |
|
"grad_norm": 2.8794319732821956, |
|
"learning_rate": 8.986494142931168e-06, |
|
"loss": 0.6113, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.287903893951947, |
|
"grad_norm": 2.6994578026382956, |
|
"learning_rate": 8.964564374661597e-06, |
|
"loss": 0.6083, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.2899751449875725, |
|
"grad_norm": 2.901823826406794, |
|
"learning_rate": 8.94242725178288e-06, |
|
"loss": 0.616, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.292046396023198, |
|
"grad_norm": 2.653196617958314, |
|
"learning_rate": 8.920083932110608e-06, |
|
"loss": 0.6196, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.29411764705882354, |
|
"grad_norm": 2.6749585402475264, |
|
"learning_rate": 8.89753558424488e-06, |
|
"loss": 0.597, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.29618889809444904, |
|
"grad_norm": 2.7369834176016723, |
|
"learning_rate": 8.874783387509181e-06, |
|
"loss": 0.6012, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.2982601491300746, |
|
"grad_norm": 2.845681780523314, |
|
"learning_rate": 8.851828531888692e-06, |
|
"loss": 0.6223, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.3003314001657001, |
|
"grad_norm": 2.7849231768312883, |
|
"learning_rate": 8.828672217968055e-06, |
|
"loss": 0.6192, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.3024026512013256, |
|
"grad_norm": 2.7476355382279385, |
|
"learning_rate": 8.805315656868587e-06, |
|
"loss": 0.6053, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.30447390223695114, |
|
"grad_norm": 2.686604793998387, |
|
"learning_rate": 8.781760070184933e-06, |
|
"loss": 0.5934, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.30654515327257664, |
|
"grad_norm": 2.997703713470155, |
|
"learning_rate": 8.75800668992117e-06, |
|
"loss": 0.5956, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.30861640430820214, |
|
"grad_norm": 2.621004060213254, |
|
"learning_rate": 8.734056758426367e-06, |
|
"loss": 0.5974, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.3106876553438277, |
|
"grad_norm": 2.6549364932267303, |
|
"learning_rate": 8.709911528329623e-06, |
|
"loss": 0.5948, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.3127589063794532, |
|
"grad_norm": 2.7965881278503435, |
|
"learning_rate": 8.685572262474538e-06, |
|
"loss": 0.5967, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.3148301574150787, |
|
"grad_norm": 2.5906530552932185, |
|
"learning_rate": 8.661040233853166e-06, |
|
"loss": 0.5824, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.31690140845070425, |
|
"grad_norm": 2.96203173026875, |
|
"learning_rate": 8.636316725539445e-06, |
|
"loss": 0.5833, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.31897265948632975, |
|
"grad_norm": 2.6456839069829967, |
|
"learning_rate": 8.611403030622074e-06, |
|
"loss": 0.5913, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.32104391052195524, |
|
"grad_norm": 2.552077771616609, |
|
"learning_rate": 8.586300452136895e-06, |
|
"loss": 0.5604, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.3231151615575808, |
|
"grad_norm": 2.681514794026766, |
|
"learning_rate": 8.561010302998734e-06, |
|
"loss": 0.5878, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.3251864125932063, |
|
"grad_norm": 2.810277058352099, |
|
"learning_rate": 8.535533905932739e-06, |
|
"loss": 0.5904, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.3272576636288318, |
|
"grad_norm": 2.823366500468815, |
|
"learning_rate": 8.509872593405189e-06, |
|
"loss": 0.5785, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.32932891466445735, |
|
"grad_norm": 2.537302794266039, |
|
"learning_rate": 8.484027707553818e-06, |
|
"loss": 0.5749, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.33140016570008285, |
|
"grad_norm": 2.679914960811432, |
|
"learning_rate": 8.458000600117604e-06, |
|
"loss": 0.5758, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.33347141673570835, |
|
"grad_norm": 2.714722412588362, |
|
"learning_rate": 8.43179263236608e-06, |
|
"loss": 0.5703, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.3355426677713339, |
|
"grad_norm": 2.8825027456443593, |
|
"learning_rate": 8.40540517502813e-06, |
|
"loss": 0.5741, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.3376139188069594, |
|
"grad_norm": 2.69779454090861, |
|
"learning_rate": 8.378839608220304e-06, |
|
"loss": 0.5758, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.3396851698425849, |
|
"grad_norm": 2.591195744363521, |
|
"learning_rate": 8.35209732137463e-06, |
|
"loss": 0.5703, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.34175642087821045, |
|
"grad_norm": 2.5928484463176056, |
|
"learning_rate": 8.32517971316595e-06, |
|
"loss": 0.5789, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.34382767191383595, |
|
"grad_norm": 2.8392376730929962, |
|
"learning_rate": 8.298088191438753e-06, |
|
"loss": 0.5573, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.34589892294946145, |
|
"grad_norm": 2.717204690264741, |
|
"learning_rate": 8.270824173133563e-06, |
|
"loss": 0.5553, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.347970173985087, |
|
"grad_norm": 2.6426626328945124, |
|
"learning_rate": 8.243389084212808e-06, |
|
"loss": 0.5503, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.3500414250207125, |
|
"grad_norm": 2.6361988946032806, |
|
"learning_rate": 8.215784359586257e-06, |
|
"loss": 0.5498, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.352112676056338, |
|
"grad_norm": 2.452756091881905, |
|
"learning_rate": 8.188011443035962e-06, |
|
"loss": 0.538, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.35418392709196356, |
|
"grad_norm": 2.740977671213533, |
|
"learning_rate": 8.160071787140742e-06, |
|
"loss": 0.5452, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.35625517812758906, |
|
"grad_norm": 2.650236162364244, |
|
"learning_rate": 8.131966853200226e-06, |
|
"loss": 0.5485, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.35832642916321455, |
|
"grad_norm": 2.701846208365303, |
|
"learning_rate": 8.103698111158405e-06, |
|
"loss": 0.5495, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.3603976801988401, |
|
"grad_norm": 2.7179474541169104, |
|
"learning_rate": 8.075267039526764e-06, |
|
"loss": 0.548, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.3624689312344656, |
|
"grad_norm": 2.833260463644198, |
|
"learning_rate": 8.046675125306948e-06, |
|
"loss": 0.5632, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.36454018227009116, |
|
"grad_norm": 2.7066133568619724, |
|
"learning_rate": 8.017923863912989e-06, |
|
"loss": 0.5475, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.36661143330571666, |
|
"grad_norm": 2.554105810407388, |
|
"learning_rate": 7.989014759093095e-06, |
|
"loss": 0.5455, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.36868268434134216, |
|
"grad_norm": 2.530083794168148, |
|
"learning_rate": 7.959949322850994e-06, |
|
"loss": 0.5397, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.3707539353769677, |
|
"grad_norm": 2.6016133268460937, |
|
"learning_rate": 7.930729075366867e-06, |
|
"loss": 0.542, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.3728251864125932, |
|
"grad_norm": 2.5754320630229754, |
|
"learning_rate": 7.901355544917827e-06, |
|
"loss": 0.5362, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.3748964374482187, |
|
"grad_norm": 2.576938951435503, |
|
"learning_rate": 7.87183026779799e-06, |
|
"loss": 0.5414, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.37696768848384427, |
|
"grad_norm": 2.6785725784997796, |
|
"learning_rate": 7.842154788238124e-06, |
|
"loss": 0.5512, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.37903893951946976, |
|
"grad_norm": 2.6795394480092414, |
|
"learning_rate": 7.812330658324884e-06, |
|
"loss": 0.5375, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.38111019055509526, |
|
"grad_norm": 2.672382602041246, |
|
"learning_rate": 7.782359437919644e-06, |
|
"loss": 0.5457, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.3831814415907208, |
|
"grad_norm": 2.884729781500387, |
|
"learning_rate": 7.75224269457689e-06, |
|
"loss": 0.5239, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.3852526926263463, |
|
"grad_norm": 2.56507336027056, |
|
"learning_rate": 7.721982003462255e-06, |
|
"loss": 0.5405, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.3873239436619718, |
|
"grad_norm": 2.5473419393915, |
|
"learning_rate": 7.691578947270122e-06, |
|
"loss": 0.5438, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.38939519469759737, |
|
"grad_norm": 2.783657224970263, |
|
"learning_rate": 7.661035116140856e-06, |
|
"loss": 0.5198, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.39146644573322287, |
|
"grad_norm": 2.6274430429540927, |
|
"learning_rate": 7.63035210757763e-06, |
|
"loss": 0.5535, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.39353769676884837, |
|
"grad_norm": 2.7442552412406958, |
|
"learning_rate": 7.599531526362873e-06, |
|
"loss": 0.5304, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.3956089478044739, |
|
"grad_norm": 2.7785345881187786, |
|
"learning_rate": 7.568574984474335e-06, |
|
"loss": 0.5225, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.3976801988400994, |
|
"grad_norm": 2.720728749217881, |
|
"learning_rate": 7.537484101000787e-06, |
|
"loss": 0.5217, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.3997514498757249, |
|
"grad_norm": 2.637160358823368, |
|
"learning_rate": 7.506260502057325e-06, |
|
"loss": 0.5297, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.40182270091135047, |
|
"grad_norm": 2.6335954311355216, |
|
"learning_rate": 7.474905820700334e-06, |
|
"loss": 0.5291, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.40389395194697597, |
|
"grad_norm": 2.656261545637842, |
|
"learning_rate": 7.443421696842066e-06, |
|
"loss": 0.5076, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.40596520298260147, |
|
"grad_norm": 2.6508585415601362, |
|
"learning_rate": 7.411809777164873e-06, |
|
"loss": 0.5183, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.408036454018227, |
|
"grad_norm": 2.665237164150542, |
|
"learning_rate": 7.380071715035089e-06, |
|
"loss": 0.5241, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.4101077050538525, |
|
"grad_norm": 2.5770691509075565, |
|
"learning_rate": 7.3482091704165405e-06, |
|
"loss": 0.5164, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.412178956089478, |
|
"grad_norm": 2.5586140708653584, |
|
"learning_rate": 7.316223809783745e-06, |
|
"loss": 0.4982, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.4142502071251036, |
|
"grad_norm": 2.6304400469333697, |
|
"learning_rate": 7.284117306034733e-06, |
|
"loss": 0.514, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.4163214581607291, |
|
"grad_norm": 2.5369318681193667, |
|
"learning_rate": 7.2518913384035685e-06, |
|
"loss": 0.5119, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.4183927091963546, |
|
"grad_norm": 2.7061992404592683, |
|
"learning_rate": 7.219547592372512e-06, |
|
"loss": 0.5175, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.4204639602319801, |
|
"grad_norm": 2.420325437701678, |
|
"learning_rate": 7.187087759583869e-06, |
|
"loss": 0.5063, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.4225352112676056, |
|
"grad_norm": 2.7148650041029514, |
|
"learning_rate": 7.15451353775151e-06, |
|
"loss": 0.5237, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.4246064623032311, |
|
"grad_norm": 2.654478692235913, |
|
"learning_rate": 7.121826630572084e-06, |
|
"loss": 0.4966, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.4266777133388567, |
|
"grad_norm": 2.6379021813183354, |
|
"learning_rate": 7.089028747635908e-06, |
|
"loss": 0.4938, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.4287489643744822, |
|
"grad_norm": 2.713654107850754, |
|
"learning_rate": 7.056121604337554e-06, |
|
"loss": 0.5074, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.43082021541010773, |
|
"grad_norm": 2.540085709590772, |
|
"learning_rate": 7.023106921786118e-06, |
|
"loss": 0.4995, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.43289146644573323, |
|
"grad_norm": 2.5985604745407374, |
|
"learning_rate": 6.9899864267152275e-06, |
|
"loss": 0.5318, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.43496271748135873, |
|
"grad_norm": 2.4411457867844417, |
|
"learning_rate": 6.956761851392706e-06, |
|
"loss": 0.4858, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.4370339685169843, |
|
"grad_norm": 2.4714643955598072, |
|
"learning_rate": 6.9234349335299835e-06, |
|
"loss": 0.5171, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.4391052195526098, |
|
"grad_norm": 2.4510153141350868, |
|
"learning_rate": 6.890007416191209e-06, |
|
"loss": 0.4875, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.4411764705882353, |
|
"grad_norm": 2.502867427304289, |
|
"learning_rate": 6.8564810477020835e-06, |
|
"loss": 0.4982, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.44324772162386084, |
|
"grad_norm": 2.5224619340497183, |
|
"learning_rate": 6.822857581558423e-06, |
|
"loss": 0.4971, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.44531897265948633, |
|
"grad_norm": 2.622904137029192, |
|
"learning_rate": 6.789138776334441e-06, |
|
"loss": 0.5063, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.44739022369511183, |
|
"grad_norm": 2.4481597241493174, |
|
"learning_rate": 6.7553263955907755e-06, |
|
"loss": 0.4841, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.4494614747307374, |
|
"grad_norm": 2.505241922300399, |
|
"learning_rate": 6.721422207782249e-06, |
|
"loss": 0.486, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.4515327257663629, |
|
"grad_norm": 2.51038092005137, |
|
"learning_rate": 6.687427986165379e-06, |
|
"loss": 0.4866, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.4536039768019884, |
|
"grad_norm": 2.618228992641671, |
|
"learning_rate": 6.653345508705629e-06, |
|
"loss": 0.4889, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.45567522783761394, |
|
"grad_norm": 2.6041268512577873, |
|
"learning_rate": 6.6191765579844205e-06, |
|
"loss": 0.4957, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.45774647887323944, |
|
"grad_norm": 2.5703382132992414, |
|
"learning_rate": 6.584922921105894e-06, |
|
"loss": 0.4805, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.45981772990886494, |
|
"grad_norm": 2.4810077168179894, |
|
"learning_rate": 6.550586389603451e-06, |
|
"loss": 0.4843, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.4618889809444905, |
|
"grad_norm": 2.7146694842288492, |
|
"learning_rate": 6.5161687593460395e-06, |
|
"loss": 0.4805, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.463960231980116, |
|
"grad_norm": 2.661840488355652, |
|
"learning_rate": 6.481671830444243e-06, |
|
"loss": 0.4766, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.4660314830157415, |
|
"grad_norm": 2.542982024246636, |
|
"learning_rate": 6.447097407156114e-06, |
|
"loss": 0.4714, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.46810273405136704, |
|
"grad_norm": 2.545352453639695, |
|
"learning_rate": 6.412447297792818e-06, |
|
"loss": 0.4627, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.47017398508699254, |
|
"grad_norm": 2.601762512884624, |
|
"learning_rate": 6.377723314624057e-06, |
|
"loss": 0.4617, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.47224523612261804, |
|
"grad_norm": 2.5271935902650746, |
|
"learning_rate": 6.3429272737832726e-06, |
|
"loss": 0.482, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.4743164871582436, |
|
"grad_norm": 2.8257956277427265, |
|
"learning_rate": 6.308060995172673e-06, |
|
"loss": 0.4875, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.4763877381938691, |
|
"grad_norm": 2.6203637102255843, |
|
"learning_rate": 6.273126302368037e-06, |
|
"loss": 0.4725, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.4784589892294946, |
|
"grad_norm": 2.4615255657443855, |
|
"learning_rate": 6.238125022523343e-06, |
|
"loss": 0.462, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.48053024026512015, |
|
"grad_norm": 2.399337323008823, |
|
"learning_rate": 6.203058986275207e-06, |
|
"loss": 0.4706, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.48260149130074564, |
|
"grad_norm": 2.568546498877998, |
|
"learning_rate": 6.1679300276471285e-06, |
|
"loss": 0.46, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.48467274233637114, |
|
"grad_norm": 2.5774193338530886, |
|
"learning_rate": 6.132739983953579e-06, |
|
"loss": 0.4639, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.4867439933719967, |
|
"grad_norm": 2.433169814085788, |
|
"learning_rate": 6.097490695703896e-06, |
|
"loss": 0.4706, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.4888152444076222, |
|
"grad_norm": 2.5672509798739975, |
|
"learning_rate": 6.062184006506027e-06, |
|
"loss": 0.469, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.4908864954432477, |
|
"grad_norm": 2.566412335613826, |
|
"learning_rate": 6.026821762970102e-06, |
|
"loss": 0.4667, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.49295774647887325, |
|
"grad_norm": 2.4468040851684885, |
|
"learning_rate": 5.991405814611855e-06, |
|
"loss": 0.4547, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.49502899751449875, |
|
"grad_norm": 2.490658198568155, |
|
"learning_rate": 5.955938013755888e-06, |
|
"loss": 0.4628, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.4971002485501243, |
|
"grad_norm": 2.463293970038353, |
|
"learning_rate": 5.920420215438794e-06, |
|
"loss": 0.4804, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.4991714995857498, |
|
"grad_norm": 2.6047443491478393, |
|
"learning_rate": 5.8848542773121285e-06, |
|
"loss": 0.468, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.5012427506213754, |
|
"grad_norm": 2.4826419630150407, |
|
"learning_rate": 5.849242059545259e-06, |
|
"loss": 0.4565, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.5033140016570008, |
|
"grad_norm": 2.757538020740051, |
|
"learning_rate": 5.81358542472807e-06, |
|
"loss": 0.4598, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.5053852526926264, |
|
"grad_norm": 2.522678699637508, |
|
"learning_rate": 5.777886237773542e-06, |
|
"loss": 0.4451, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.5074565037282519, |
|
"grad_norm": 2.697025952581437, |
|
"learning_rate": 5.742146365820223e-06, |
|
"loss": 0.4606, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.5095277547638773, |
|
"grad_norm": 2.5255536162735095, |
|
"learning_rate": 5.706367678134562e-06, |
|
"loss": 0.4587, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.5115990057995029, |
|
"grad_norm": 2.5706493150820866, |
|
"learning_rate": 5.670552046013151e-06, |
|
"loss": 0.4626, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.5136702568351285, |
|
"grad_norm": 2.478635300584232, |
|
"learning_rate": 5.634701342684852e-06, |
|
"loss": 0.4495, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.5157415078707539, |
|
"grad_norm": 2.413081651555011, |
|
"learning_rate": 5.598817443212813e-06, |
|
"loss": 0.4545, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.5178127589063795, |
|
"grad_norm": 2.4514043208682614, |
|
"learning_rate": 5.562902224396416e-06, |
|
"loss": 0.4487, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.519884009942005, |
|
"grad_norm": 2.8553325471084894, |
|
"learning_rate": 5.526957564673098e-06, |
|
"loss": 0.4491, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.5219552609776305, |
|
"grad_norm": 2.3499605624954043, |
|
"learning_rate": 5.49098534402012e-06, |
|
"loss": 0.455, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.524026512013256, |
|
"grad_norm": 2.511645835881994, |
|
"learning_rate": 5.454987443856235e-06, |
|
"loss": 0.4405, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.5260977630488816, |
|
"grad_norm": 2.5573246473567663, |
|
"learning_rate": 5.418965746943281e-06, |
|
"loss": 0.4573, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.528169014084507, |
|
"grad_norm": 2.6069496932706446, |
|
"learning_rate": 5.3829221372877175e-06, |
|
"loss": 0.4385, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.5302402651201326, |
|
"grad_norm": 2.3669759764626286, |
|
"learning_rate": 5.34685850004208e-06, |
|
"loss": 0.4474, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.5323115161557581, |
|
"grad_norm": 2.5262162943745428, |
|
"learning_rate": 5.310776721406392e-06, |
|
"loss": 0.4465, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.5343827671913836, |
|
"grad_norm": 2.367388706229585, |
|
"learning_rate": 5.2746786885295034e-06, |
|
"loss": 0.4477, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.5364540182270091, |
|
"grad_norm": 2.4424670349956834, |
|
"learning_rate": 5.238566289410396e-06, |
|
"loss": 0.4347, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.5385252692626347, |
|
"grad_norm": 2.374597209278358, |
|
"learning_rate": 5.2024414127994325e-06, |
|
"loss": 0.4414, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.5405965202982601, |
|
"grad_norm": 2.363317895955179, |
|
"learning_rate": 5.166305948099574e-06, |
|
"loss": 0.4211, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.5426677713338857, |
|
"grad_norm": 2.445116140490052, |
|
"learning_rate": 5.13016178526756e-06, |
|
"loss": 0.4341, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.5447390223695112, |
|
"grad_norm": 2.451842751824045, |
|
"learning_rate": 5.094010814715062e-06, |
|
"loss": 0.4323, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.5468102734051367, |
|
"grad_norm": 2.4108281284288116, |
|
"learning_rate": 5.057854927209804e-06, |
|
"loss": 0.4363, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.5488815244407622, |
|
"grad_norm": 2.47478290810414, |
|
"learning_rate": 5.0216960137766805e-06, |
|
"loss": 0.429, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.5509527754763878, |
|
"grad_norm": 2.462991177532584, |
|
"learning_rate": 4.985535965598843e-06, |
|
"loss": 0.4504, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.5530240265120132, |
|
"grad_norm": 2.437965270822768, |
|
"learning_rate": 4.949376673918802e-06, |
|
"loss": 0.432, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.5550952775476388, |
|
"grad_norm": 2.588594999778493, |
|
"learning_rate": 4.913220029939491e-06, |
|
"loss": 0.4335, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.5571665285832643, |
|
"grad_norm": 2.624887614176281, |
|
"learning_rate": 4.877067924725368e-06, |
|
"loss": 0.4268, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.5592377796188898, |
|
"grad_norm": 2.59768554910079, |
|
"learning_rate": 4.840922249103506e-06, |
|
"loss": 0.4304, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.5613090306545153, |
|
"grad_norm": 2.3988418599389063, |
|
"learning_rate": 4.804784893564697e-06, |
|
"loss": 0.4411, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.5633802816901409, |
|
"grad_norm": 2.580765462139435, |
|
"learning_rate": 4.7686577481645745e-06, |
|
"loss": 0.4082, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.5654515327257663, |
|
"grad_norm": 2.4467281357535855, |
|
"learning_rate": 4.732542702424759e-06, |
|
"loss": 0.4382, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.5675227837613919, |
|
"grad_norm": 2.6370177774043073, |
|
"learning_rate": 4.696441645234042e-06, |
|
"loss": 0.4271, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.5695940347970174, |
|
"grad_norm": 2.475111070321159, |
|
"learning_rate": 4.660356464749578e-06, |
|
"loss": 0.4185, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.5716652858326429, |
|
"grad_norm": 2.6994305643163776, |
|
"learning_rate": 4.624289048298147e-06, |
|
"loss": 0.4259, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.5737365368682684, |
|
"grad_norm": 2.4520066328265977, |
|
"learning_rate": 4.588241282277428e-06, |
|
"loss": 0.4179, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.575807787903894, |
|
"grad_norm": 2.3795955584858386, |
|
"learning_rate": 4.55221505205734e-06, |
|
"loss": 0.4287, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.5778790389395194, |
|
"grad_norm": 2.381541096732435, |
|
"learning_rate": 4.516212241881448e-06, |
|
"loss": 0.4157, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.579950289975145, |
|
"grad_norm": 2.566306679289895, |
|
"learning_rate": 4.480234734768393e-06, |
|
"loss": 0.4213, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.5820215410107705, |
|
"grad_norm": 2.396097079278926, |
|
"learning_rate": 4.444284412413418e-06, |
|
"loss": 0.4172, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.584092792046396, |
|
"grad_norm": 2.3514162631646274, |
|
"learning_rate": 4.408363155089952e-06, |
|
"loss": 0.4211, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.5861640430820215, |
|
"grad_norm": 2.3172982824966906, |
|
"learning_rate": 4.3724728415512585e-06, |
|
"loss": 0.4193, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.5882352941176471, |
|
"grad_norm": 2.472751185964712, |
|
"learning_rate": 4.3366153489321855e-06, |
|
"loss": 0.4264, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.5903065451532725, |
|
"grad_norm": 2.4154833816633365, |
|
"learning_rate": 4.30079255265098e-06, |
|
"loss": 0.4048, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.5923777961888981, |
|
"grad_norm": 2.5211717793789803, |
|
"learning_rate": 4.265006326311199e-06, |
|
"loss": 0.4089, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.5944490472245236, |
|
"grad_norm": 2.3701199367093504, |
|
"learning_rate": 4.229258541603723e-06, |
|
"loss": 0.4156, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.5965202982601492, |
|
"grad_norm": 2.3994950566503994, |
|
"learning_rate": 4.1935510682088545e-06, |
|
"loss": 0.4065, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.5985915492957746, |
|
"grad_norm": 2.4968713927832273, |
|
"learning_rate": 4.157885773698535e-06, |
|
"loss": 0.4057, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.6006628003314002, |
|
"grad_norm": 2.416064472998044, |
|
"learning_rate": 4.122264523438668e-06, |
|
"loss": 0.3931, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.6027340513670257, |
|
"grad_norm": 2.2816034379976355, |
|
"learning_rate": 4.086689180491554e-06, |
|
"loss": 0.4099, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.6048053024026512, |
|
"grad_norm": 2.40826781123055, |
|
"learning_rate": 4.051161605518453e-06, |
|
"loss": 0.4149, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.6068765534382767, |
|
"grad_norm": 2.538687087204467, |
|
"learning_rate": 4.015683656682255e-06, |
|
"loss": 0.4175, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.6089478044739023, |
|
"grad_norm": 2.430104423449275, |
|
"learning_rate": 3.980257189550316e-06, |
|
"loss": 0.4053, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.6110190555095277, |
|
"grad_norm": 2.3721045844407143, |
|
"learning_rate": 3.94488405699739e-06, |
|
"loss": 0.4031, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.6130903065451533, |
|
"grad_norm": 2.338789854463612, |
|
"learning_rate": 3.909566109108727e-06, |
|
"loss": 0.3972, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.6151615575807788, |
|
"grad_norm": 2.2787527608868943, |
|
"learning_rate": 3.874305193083313e-06, |
|
"loss": 0.411, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.6172328086164043, |
|
"grad_norm": 2.454450620709443, |
|
"learning_rate": 3.839103153137247e-06, |
|
"loss": 0.4008, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.6193040596520298, |
|
"grad_norm": 2.4225686330793823, |
|
"learning_rate": 3.803961830407297e-06, |
|
"loss": 0.3974, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.6213753106876554, |
|
"grad_norm": 2.564739658366535, |
|
"learning_rate": 3.768883062854598e-06, |
|
"loss": 0.4152, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.6234465617232808, |
|
"grad_norm": 2.4055933426253837, |
|
"learning_rate": 3.7338686851685267e-06, |
|
"loss": 0.4021, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.6255178127589064, |
|
"grad_norm": 2.3297777756780986, |
|
"learning_rate": 3.6989205286707398e-06, |
|
"loss": 0.4087, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.6275890637945319, |
|
"grad_norm": 2.276174828544462, |
|
"learning_rate": 3.664040421219393e-06, |
|
"loss": 0.4, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.6296603148301574, |
|
"grad_norm": 2.558523087497906, |
|
"learning_rate": 3.6292301871135425e-06, |
|
"loss": 0.3919, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.6317315658657829, |
|
"grad_norm": 2.599732843239929, |
|
"learning_rate": 3.59449164699773e-06, |
|
"loss": 0.4016, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.6338028169014085, |
|
"grad_norm": 2.4726991401898952, |
|
"learning_rate": 3.55982661776676e-06, |
|
"loss": 0.3986, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.6358740679370339, |
|
"grad_norm": 2.321690978258901, |
|
"learning_rate": 3.5252369124706697e-06, |
|
"loss": 0.3978, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.6379453189726595, |
|
"grad_norm": 2.6754380236168664, |
|
"learning_rate": 3.4907243402199013e-06, |
|
"loss": 0.4113, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.640016570008285, |
|
"grad_norm": 2.4052468560081475, |
|
"learning_rate": 3.4562907060906908e-06, |
|
"loss": 0.3813, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.6420878210439105, |
|
"grad_norm": 2.4570458861707696, |
|
"learning_rate": 3.4219378110306523e-06, |
|
"loss": 0.392, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.644159072079536, |
|
"grad_norm": 2.3101859891504866, |
|
"learning_rate": 3.3876674517645815e-06, |
|
"loss": 0.3996, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.6462303231151616, |
|
"grad_norm": 2.4539409882344483, |
|
"learning_rate": 3.353481420700495e-06, |
|
"loss": 0.3979, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.648301574150787, |
|
"grad_norm": 2.2847760045122087, |
|
"learning_rate": 3.319381505835868e-06, |
|
"loss": 0.3749, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.6503728251864126, |
|
"grad_norm": 2.2918085600506717, |
|
"learning_rate": 3.285369490664133e-06, |
|
"loss": 0.3895, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.6524440762220381, |
|
"grad_norm": 2.4126038265192538, |
|
"learning_rate": 3.251447154081394e-06, |
|
"loss": 0.3885, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.6545153272576636, |
|
"grad_norm": 2.4831233203755603, |
|
"learning_rate": 3.2176162702933816e-06, |
|
"loss": 0.413, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.6565865782932891, |
|
"grad_norm": 2.385955156565656, |
|
"learning_rate": 3.183878608722669e-06, |
|
"loss": 0.3838, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.6586578293289147, |
|
"grad_norm": 2.5426984744026124, |
|
"learning_rate": 3.150235933916115e-06, |
|
"loss": 0.3755, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.6607290803645401, |
|
"grad_norm": 2.388904551962294, |
|
"learning_rate": 3.1166900054525873e-06, |
|
"loss": 0.3868, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.6628003314001657, |
|
"grad_norm": 2.5869530818499147, |
|
"learning_rate": 3.0832425778509235e-06, |
|
"loss": 0.3821, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.6648715824357913, |
|
"grad_norm": 2.47920960360778, |
|
"learning_rate": 3.049895400478174e-06, |
|
"loss": 0.3935, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.6669428334714167, |
|
"grad_norm": 2.4320164826856234, |
|
"learning_rate": 3.0166502174581012e-06, |
|
"loss": 0.3732, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.6690140845070423, |
|
"grad_norm": 2.3447158283526783, |
|
"learning_rate": 2.983508767579956e-06, |
|
"loss": 0.3805, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.6710853355426678, |
|
"grad_norm": 2.336702612441604, |
|
"learning_rate": 2.950472784207544e-06, |
|
"loss": 0.3814, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.6731565865782932, |
|
"grad_norm": 2.403683822019676, |
|
"learning_rate": 2.917543995188562e-06, |
|
"loss": 0.3799, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.6752278376139188, |
|
"grad_norm": 2.32245444127231, |
|
"learning_rate": 2.8847241227642255e-06, |
|
"loss": 0.3878, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.6772990886495444, |
|
"grad_norm": 2.2336601101273894, |
|
"learning_rate": 2.852014883479198e-06, |
|
"loss": 0.3668, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.6793703396851698, |
|
"grad_norm": 2.4572013362431666, |
|
"learning_rate": 2.819417988091814e-06, |
|
"loss": 0.3789, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.6814415907207954, |
|
"grad_norm": 2.374979599510779, |
|
"learning_rate": 2.786935141484586e-06, |
|
"loss": 0.3776, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.6835128417564209, |
|
"grad_norm": 2.387695271521185, |
|
"learning_rate": 2.754568042575061e-06, |
|
"loss": 0.3785, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.6855840927920464, |
|
"grad_norm": 2.4380470816357573, |
|
"learning_rate": 2.7223183842269442e-06, |
|
"loss": 0.3762, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.6876553438276719, |
|
"grad_norm": 2.2802764216409996, |
|
"learning_rate": 2.6901878531615677e-06, |
|
"loss": 0.377, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.6897265948632975, |
|
"grad_norm": 2.3338582614060748, |
|
"learning_rate": 2.658178129869672e-06, |
|
"loss": 0.382, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.6917978458989229, |
|
"grad_norm": 2.4939738702851932, |
|
"learning_rate": 2.6262908885235046e-06, |
|
"loss": 0.3789, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.6938690969345485, |
|
"grad_norm": 2.409154689853176, |
|
"learning_rate": 2.594527796889265e-06, |
|
"loss": 0.3859, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.695940347970174, |
|
"grad_norm": 2.4232651472750724, |
|
"learning_rate": 2.5628905162398797e-06, |
|
"loss": 0.3759, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.6980115990057995, |
|
"grad_norm": 2.438606094251203, |
|
"learning_rate": 2.531380701268108e-06, |
|
"loss": 0.3707, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.700082850041425, |
|
"grad_norm": 2.2068267980004164, |
|
"learning_rate": 2.5000000000000015e-06, |
|
"loss": 0.3635, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.7021541010770506, |
|
"grad_norm": 2.4849851469693487, |
|
"learning_rate": 2.4687500537087027e-06, |
|
"loss": 0.3765, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.704225352112676, |
|
"grad_norm": 2.3676710626755604, |
|
"learning_rate": 2.4376324968286154e-06, |
|
"loss": 0.3706, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.7062966031483016, |
|
"grad_norm": 2.4665247552413194, |
|
"learning_rate": 2.40664895686991e-06, |
|
"loss": 0.359, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.7083678541839271, |
|
"grad_norm": 2.4228989801005882, |
|
"learning_rate": 2.375801054333409e-06, |
|
"loss": 0.3574, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.7104391052195526, |
|
"grad_norm": 2.447240340812193, |
|
"learning_rate": 2.345090402625822e-06, |
|
"loss": 0.3629, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.7125103562551781, |
|
"grad_norm": 2.494665611756937, |
|
"learning_rate": 2.3145186079753685e-06, |
|
"loss": 0.3607, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.7145816072908037, |
|
"grad_norm": 2.3466738040218433, |
|
"learning_rate": 2.2840872693477694e-06, |
|
"loss": 0.3582, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.7166528583264291, |
|
"grad_norm": 2.4579034785541274, |
|
"learning_rate": 2.253797978362617e-06, |
|
"loss": 0.37, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.7187241093620547, |
|
"grad_norm": 2.294277539263346, |
|
"learning_rate": 2.2236523192101264e-06, |
|
"loss": 0.371, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.7207953603976802, |
|
"grad_norm": 2.3165105524366676, |
|
"learning_rate": 2.193651868568285e-06, |
|
"loss": 0.3533, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.7228666114333057, |
|
"grad_norm": 2.3412049707939766, |
|
"learning_rate": 2.16379819552038e-06, |
|
"loss": 0.3598, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.7249378624689312, |
|
"grad_norm": 2.37970184238566, |
|
"learning_rate": 2.1340928614729445e-06, |
|
"loss": 0.3553, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.7270091135045568, |
|
"grad_norm": 2.5101331147915733, |
|
"learning_rate": 2.1045374200740863e-06, |
|
"loss": 0.3589, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.7290803645401823, |
|
"grad_norm": 2.381929011616353, |
|
"learning_rate": 2.075133417132223e-06, |
|
"loss": 0.3544, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.7311516155758078, |
|
"grad_norm": 2.64277485599053, |
|
"learning_rate": 2.045882390535248e-06, |
|
"loss": 0.354, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.7332228666114333, |
|
"grad_norm": 2.2983110375410756, |
|
"learning_rate": 2.016785870170079e-06, |
|
"loss": 0.359, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.7352941176470589, |
|
"grad_norm": 2.338818197474944, |
|
"learning_rate": 1.987845377842656e-06, |
|
"loss": 0.3515, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.7373653686826843, |
|
"grad_norm": 2.412156205295859, |
|
"learning_rate": 1.9590624271983406e-06, |
|
"loss": 0.3707, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.7394366197183099, |
|
"grad_norm": 2.3221544897617394, |
|
"learning_rate": 1.9304385236427505e-06, |
|
"loss": 0.3522, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.7415078707539354, |
|
"grad_norm": 2.4316222609963427, |
|
"learning_rate": 1.9019751642630252e-06, |
|
"loss": 0.3574, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.7435791217895609, |
|
"grad_norm": 2.2641479623423635, |
|
"learning_rate": 1.8736738377495196e-06, |
|
"loss": 0.3592, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.7456503728251864, |
|
"grad_norm": 2.3420647549229265, |
|
"learning_rate": 1.8455360243179537e-06, |
|
"loss": 0.3542, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.747721623860812, |
|
"grad_norm": 2.456638840774933, |
|
"learning_rate": 1.8175631956319823e-06, |
|
"loss": 0.3542, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.7497928748964374, |
|
"grad_norm": 2.315911960148082, |
|
"learning_rate": 1.7897568147262323e-06, |
|
"loss": 0.3651, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.751864125932063, |
|
"grad_norm": 2.430456263244821, |
|
"learning_rate": 1.7621183359297817e-06, |
|
"loss": 0.3513, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.7539353769676885, |
|
"grad_norm": 2.2820850020652137, |
|
"learning_rate": 1.7346492047900897e-06, |
|
"loss": 0.344, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.756006628003314, |
|
"grad_norm": 2.4006568265757755, |
|
"learning_rate": 1.7073508579973996e-06, |
|
"loss": 0.3526, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.7580778790389395, |
|
"grad_norm": 2.2901041487912774, |
|
"learning_rate": 1.6802247233095914e-06, |
|
"loss": 0.349, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.7601491300745651, |
|
"grad_norm": 2.4009693108682204, |
|
"learning_rate": 1.6532722194775108e-06, |
|
"loss": 0.3537, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.7622203811101905, |
|
"grad_norm": 2.343710211021654, |
|
"learning_rate": 1.626494756170765e-06, |
|
"loss": 0.3536, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.7642916321458161, |
|
"grad_norm": 2.3742949969171363, |
|
"learning_rate": 1.5998937339039889e-06, |
|
"loss": 0.3542, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.7663628831814416, |
|
"grad_norm": 2.3672793937996093, |
|
"learning_rate": 1.5734705439636017e-06, |
|
"loss": 0.3428, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.7684341342170671, |
|
"grad_norm": 2.2889634008759803, |
|
"learning_rate": 1.5472265683350397e-06, |
|
"loss": 0.3535, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.7705053852526926, |
|
"grad_norm": 2.2816526862864164, |
|
"learning_rate": 1.5211631796304721e-06, |
|
"loss": 0.3452, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.7725766362883182, |
|
"grad_norm": 2.256494769623704, |
|
"learning_rate": 1.495281741017016e-06, |
|
"loss": 0.3569, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.7746478873239436, |
|
"grad_norm": 2.2248161459647333, |
|
"learning_rate": 1.46958360614543e-06, |
|
"loss": 0.3439, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.7767191383595692, |
|
"grad_norm": 2.3902740970389065, |
|
"learning_rate": 1.4440701190793278e-06, |
|
"loss": 0.3301, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.7787903893951947, |
|
"grad_norm": 2.439067358960243, |
|
"learning_rate": 1.4187426142248723e-06, |
|
"loss": 0.3462, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.7808616404308202, |
|
"grad_norm": 2.3392746632984585, |
|
"learning_rate": 1.3936024162609897e-06, |
|
"loss": 0.3408, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.7829328914664457, |
|
"grad_norm": 2.377498700024158, |
|
"learning_rate": 1.3686508400700787e-06, |
|
"loss": 0.3549, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.7850041425020713, |
|
"grad_norm": 2.3496620916951767, |
|
"learning_rate": 1.3438891906692447e-06, |
|
"loss": 0.3472, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.7870753935376967, |
|
"grad_norm": 2.289494721325453, |
|
"learning_rate": 1.3193187631420462e-06, |
|
"loss": 0.3355, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.7891466445733223, |
|
"grad_norm": 2.2947285370456396, |
|
"learning_rate": 1.2949408425707566e-06, |
|
"loss": 0.3394, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.7912178956089478, |
|
"grad_norm": 2.3617970794893126, |
|
"learning_rate": 1.2707567039691505e-06, |
|
"loss": 0.3497, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.7932891466445733, |
|
"grad_norm": 2.321811184391228, |
|
"learning_rate": 1.2467676122158224e-06, |
|
"loss": 0.3412, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.7953603976801988, |
|
"grad_norm": 2.3211063248548967, |
|
"learning_rate": 1.222974821988024e-06, |
|
"loss": 0.3436, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.7974316487158244, |
|
"grad_norm": 2.367048239376449, |
|
"learning_rate": 1.1993795776960498e-06, |
|
"loss": 0.3387, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.7995028997514498, |
|
"grad_norm": 2.415017409117694, |
|
"learning_rate": 1.1759831134181504e-06, |
|
"loss": 0.3477, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.8015741507870754, |
|
"grad_norm": 2.2878955632108737, |
|
"learning_rate": 1.1527866528359805e-06, |
|
"loss": 0.3424, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.8036454018227009, |
|
"grad_norm": 2.4168712983822407, |
|
"learning_rate": 1.1297914091706086e-06, |
|
"loss": 0.3395, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.8057166528583264, |
|
"grad_norm": 2.332403241899359, |
|
"learning_rate": 1.1069985851190524e-06, |
|
"loss": 0.3413, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.8077879038939519, |
|
"grad_norm": 2.245314869032349, |
|
"learning_rate": 1.0844093727913868e-06, |
|
"loss": 0.3302, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.8098591549295775, |
|
"grad_norm": 2.3766452170038814, |
|
"learning_rate": 1.062024953648384e-06, |
|
"loss": 0.3363, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.8119304059652029, |
|
"grad_norm": 2.3725445423461076, |
|
"learning_rate": 1.039846498439727e-06, |
|
"loss": 0.3312, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.8140016570008285, |
|
"grad_norm": 2.400378472961016, |
|
"learning_rate": 1.0178751671427755e-06, |
|
"loss": 0.3406, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.816072908036454, |
|
"grad_norm": 2.600161757489673, |
|
"learning_rate": 9.961121089018933e-07, |
|
"loss": 0.3286, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.8181441590720795, |
|
"grad_norm": 2.4280753174812597, |
|
"learning_rate": 9.745584619683524e-07, |
|
"loss": 0.3439, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.820215410107705, |
|
"grad_norm": 2.260545698321657, |
|
"learning_rate": 9.532153536407923e-07, |
|
"loss": 0.3317, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.8222866611433306, |
|
"grad_norm": 2.4227916089483204, |
|
"learning_rate": 9.320839002062682e-07, |
|
"loss": 0.3312, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.824357912178956, |
|
"grad_norm": 2.8477868846172067, |
|
"learning_rate": 9.111652068818621e-07, |
|
"loss": 0.3334, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.8264291632145816, |
|
"grad_norm": 2.370728174821394, |
|
"learning_rate": 8.904603677568785e-07, |
|
"loss": 0.3278, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.8285004142502072, |
|
"grad_norm": 2.3115016796994183, |
|
"learning_rate": 8.699704657356195e-07, |
|
"loss": 0.3358, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.8305716652858326, |
|
"grad_norm": 2.253238724272844, |
|
"learning_rate": 8.496965724807516e-07, |
|
"loss": 0.3348, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.8326429163214581, |
|
"grad_norm": 2.3696595779036573, |
|
"learning_rate": 8.296397483572515e-07, |
|
"loss": 0.3322, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.8347141673570837, |
|
"grad_norm": 2.3994710782454143, |
|
"learning_rate": 8.098010423769503e-07, |
|
"loss": 0.3159, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.8367854183927091, |
|
"grad_norm": 2.1347268704516638, |
|
"learning_rate": 7.901814921436624e-07, |
|
"loss": 0.3318, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.8388566694283347, |
|
"grad_norm": 2.2628490853732397, |
|
"learning_rate": 7.70782123798921e-07, |
|
"loss": 0.3318, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.8409279204639603, |
|
"grad_norm": 2.2582184248706683, |
|
"learning_rate": 7.516039519683105e-07, |
|
"loss": 0.3234, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.8429991714995857, |
|
"grad_norm": 2.476131958227405, |
|
"learning_rate": 7.326479797083963e-07, |
|
"loss": 0.3278, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.8450704225352113, |
|
"grad_norm": 2.3728222901700677, |
|
"learning_rate": 7.139151984542636e-07, |
|
"loss": 0.3306, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.8471416735708368, |
|
"grad_norm": 2.3665157862844994, |
|
"learning_rate": 6.954065879676653e-07, |
|
"loss": 0.3309, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.8492129246064622, |
|
"grad_norm": 2.5008625602985304, |
|
"learning_rate": 6.771231162857722e-07, |
|
"loss": 0.3362, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.8512841756420878, |
|
"grad_norm": 2.2712774856509688, |
|
"learning_rate": 6.590657396705525e-07, |
|
"loss": 0.3215, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.8533554266777134, |
|
"grad_norm": 2.535681331577295, |
|
"learning_rate": 6.412354025587509e-07, |
|
"loss": 0.3229, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.8554266777133389, |
|
"grad_norm": 2.547194182489716, |
|
"learning_rate": 6.236330375124921e-07, |
|
"loss": 0.3461, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.8574979287489644, |
|
"grad_norm": 2.371373910066343, |
|
"learning_rate": 6.062595651705111e-07, |
|
"loss": 0.3389, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.8595691797845899, |
|
"grad_norm": 2.439136346587766, |
|
"learning_rate": 5.891158941999959e-07, |
|
"loss": 0.3299, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.8616404308202155, |
|
"grad_norm": 2.466656635870232, |
|
"learning_rate": 5.722029212490666e-07, |
|
"loss": 0.3298, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.8637116818558409, |
|
"grad_norm": 2.288892577992347, |
|
"learning_rate": 5.555215308998779e-07, |
|
"loss": 0.3223, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.8657829328914665, |
|
"grad_norm": 2.401858716049078, |
|
"learning_rate": 5.390725956223531e-07, |
|
"loss": 0.3218, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.867854183927092, |
|
"grad_norm": 2.33005782306982, |
|
"learning_rate": 5.22856975728554e-07, |
|
"loss": 0.3216, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.8699254349627175, |
|
"grad_norm": 2.4143761257261303, |
|
"learning_rate": 5.068755193276798e-07, |
|
"loss": 0.3307, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.871996685998343, |
|
"grad_norm": 2.352096096095919, |
|
"learning_rate": 4.911290622817161e-07, |
|
"loss": 0.3204, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.8740679370339686, |
|
"grad_norm": 2.3702490110719303, |
|
"learning_rate": 4.756184281617121e-07, |
|
"loss": 0.3245, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.876139188069594, |
|
"grad_norm": 2.2456862499766306, |
|
"learning_rate": 4.6034442820471037e-07, |
|
"loss": 0.3182, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.8782104391052196, |
|
"grad_norm": 2.2534896280263816, |
|
"learning_rate": 4.4530786127131575e-07, |
|
"loss": 0.3312, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.8802816901408451, |
|
"grad_norm": 2.309108572623016, |
|
"learning_rate": 4.305095138039106e-07, |
|
"loss": 0.326, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.8823529411764706, |
|
"grad_norm": 2.5387736296808563, |
|
"learning_rate": 4.159501597855287e-07, |
|
"loss": 0.3327, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.8844241922120961, |
|
"grad_norm": 2.4068093993598962, |
|
"learning_rate": 4.0163056069936757e-07, |
|
"loss": 0.3265, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.8864954432477217, |
|
"grad_norm": 2.410912519062069, |
|
"learning_rate": 3.8755146548896784e-07, |
|
"loss": 0.3286, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.8885666942833471, |
|
"grad_norm": 2.3441979713103476, |
|
"learning_rate": 3.737136105190337e-07, |
|
"loss": 0.3215, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.8906379453189727, |
|
"grad_norm": 2.4124574145028843, |
|
"learning_rate": 3.6011771953693044e-07, |
|
"loss": 0.3222, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.8927091963545982, |
|
"grad_norm": 2.3579671682789467, |
|
"learning_rate": 3.4676450363481937e-07, |
|
"loss": 0.3227, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.8947804473902237, |
|
"grad_norm": 2.3464323541089214, |
|
"learning_rate": 3.336546612124758e-07, |
|
"loss": 0.3197, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.8968516984258492, |
|
"grad_norm": 2.4445949626198242, |
|
"learning_rate": 3.20788877940757e-07, |
|
"loss": 0.3164, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.8989229494614748, |
|
"grad_norm": 2.668457673876882, |
|
"learning_rate": 3.081678267257404e-07, |
|
"loss": 0.3233, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.9009942004971002, |
|
"grad_norm": 2.2794632035182865, |
|
"learning_rate": 2.9579216767352815e-07, |
|
"loss": 0.314, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.9030654515327258, |
|
"grad_norm": 2.511600762449477, |
|
"learning_rate": 2.836625480557265e-07, |
|
"loss": 0.3195, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.9051367025683513, |
|
"grad_norm": 2.290095274854777, |
|
"learning_rate": 2.7177960227558863e-07, |
|
"loss": 0.323, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.9072079536039768, |
|
"grad_norm": 2.490832241300732, |
|
"learning_rate": 2.601439518348331e-07, |
|
"loss": 0.3229, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.9092792046396023, |
|
"grad_norm": 2.3369810868473833, |
|
"learning_rate": 2.487562053011422e-07, |
|
"loss": 0.3298, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.9113504556752279, |
|
"grad_norm": 2.511875787240971, |
|
"learning_rate": 2.376169582763288e-07, |
|
"loss": 0.3273, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.9134217067108533, |
|
"grad_norm": 2.4194421741312078, |
|
"learning_rate": 2.2672679336518789e-07, |
|
"loss": 0.3187, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.9154929577464789, |
|
"grad_norm": 2.4729733542961516, |
|
"learning_rate": 2.1608628014502364e-07, |
|
"loss": 0.3209, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.9175642087821044, |
|
"grad_norm": 2.379186052198249, |
|
"learning_rate": 2.0569597513586004e-07, |
|
"loss": 0.3229, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.9196354598177299, |
|
"grad_norm": 2.3319074686966674, |
|
"learning_rate": 1.955564217713335e-07, |
|
"loss": 0.3181, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.9217067108533554, |
|
"grad_norm": 2.6517343047945334, |
|
"learning_rate": 1.8566815037026897e-07, |
|
"loss": 0.323, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.923777961888981, |
|
"grad_norm": 2.232882122217696, |
|
"learning_rate": 1.7603167810894662e-07, |
|
"loss": 0.3196, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.9258492129246064, |
|
"grad_norm": 2.3988532707568186, |
|
"learning_rate": 1.6664750899404892e-07, |
|
"loss": 0.3183, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.927920463960232, |
|
"grad_norm": 2.3744099648372403, |
|
"learning_rate": 1.5751613383630128e-07, |
|
"loss": 0.3181, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.9299917149958575, |
|
"grad_norm": 2.419787212184274, |
|
"learning_rate": 1.4863803022480362e-07, |
|
"loss": 0.3244, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.932062966031483, |
|
"grad_norm": 2.3333109862483856, |
|
"learning_rate": 1.4001366250204762e-07, |
|
"loss": 0.325, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.9341342170671085, |
|
"grad_norm": 2.5299591194957505, |
|
"learning_rate": 1.3164348173963392e-07, |
|
"loss": 0.3251, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.9362054681027341, |
|
"grad_norm": 2.415328780928961, |
|
"learning_rate": 1.235279257146804e-07, |
|
"loss": 0.3317, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.9382767191383595, |
|
"grad_norm": 2.39855625868432, |
|
"learning_rate": 1.1566741888692168e-07, |
|
"loss": 0.3106, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.9403479701739851, |
|
"grad_norm": 2.51301146564224, |
|
"learning_rate": 1.080623723765134e-07, |
|
"loss": 0.3207, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.9424192212096106, |
|
"grad_norm": 2.5451397411628425, |
|
"learning_rate": 1.0071318394252849e-07, |
|
"loss": 0.321, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.9444904722452361, |
|
"grad_norm": 2.3238922238777753, |
|
"learning_rate": 9.362023796215036e-08, |
|
"loss": 0.3115, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.9465617232808616, |
|
"grad_norm": 2.3960161103125595, |
|
"learning_rate": 8.678390541057512e-08, |
|
"loss": 0.3334, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.9486329743164872, |
|
"grad_norm": 2.455291465574181, |
|
"learning_rate": 8.020454384160437e-08, |
|
"loss": 0.321, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.9507042253521126, |
|
"grad_norm": 2.313389085031741, |
|
"learning_rate": 7.388249736894615e-08, |
|
"loss": 0.3155, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.9527754763877382, |
|
"grad_norm": 2.4859967083644507, |
|
"learning_rate": 6.78180966482156e-08, |
|
"loss": 0.3189, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.9548467274233637, |
|
"grad_norm": 2.5150728286365878, |
|
"learning_rate": 6.201165885964311e-08, |
|
"loss": 0.3196, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.9569179784589892, |
|
"grad_norm": 2.5646449395309134, |
|
"learning_rate": 5.646348769148491e-08, |
|
"loss": 0.32, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.9589892294946147, |
|
"grad_norm": 2.4648895794572145, |
|
"learning_rate": 5.117387332413737e-08, |
|
"loss": 0.3142, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.9610604805302403, |
|
"grad_norm": 2.4075943066752292, |
|
"learning_rate": 4.6143092414961396e-08, |
|
"loss": 0.3275, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.9631317315658657, |
|
"grad_norm": 2.5717189064452604, |
|
"learning_rate": 4.1371408083815126e-08, |
|
"loss": 0.3326, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.9652029826014913, |
|
"grad_norm": 2.2405081152048982, |
|
"learning_rate": 3.685906989928656e-08, |
|
"loss": 0.3198, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.9672742336371168, |
|
"grad_norm": 2.493323605860437, |
|
"learning_rate": 3.2606313865646276e-08, |
|
"loss": 0.3158, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.9693454846727423, |
|
"grad_norm": 2.3221489971336307, |
|
"learning_rate": 2.861336241050061e-08, |
|
"loss": 0.3192, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.9714167357083678, |
|
"grad_norm": 2.224686624765676, |
|
"learning_rate": 2.488042437315985e-08, |
|
"loss": 0.3144, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.9734879867439934, |
|
"grad_norm": 2.517150826241084, |
|
"learning_rate": 2.1407694993714755e-08, |
|
"loss": 0.3201, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.9755592377796188, |
|
"grad_norm": 2.4950963422344126, |
|
"learning_rate": 1.8195355902824174e-08, |
|
"loss": 0.3094, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.9776304888152444, |
|
"grad_norm": 2.3719000652949376, |
|
"learning_rate": 1.5243575112218744e-08, |
|
"loss": 0.3069, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.97970173985087, |
|
"grad_norm": 2.532095428796904, |
|
"learning_rate": 1.2552507005909042e-08, |
|
"loss": 0.317, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.9817729908864954, |
|
"grad_norm": 2.508218831541988, |
|
"learning_rate": 1.0122292332114814e-08, |
|
"loss": 0.3214, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.9838442419221209, |
|
"grad_norm": 2.3230365500226156, |
|
"learning_rate": 7.953058195900864e-09, |
|
"loss": 0.3214, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.9859154929577465, |
|
"grad_norm": 2.3842329300121565, |
|
"learning_rate": 6.044918052531268e-09, |
|
"loss": 0.3267, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.987986743993372, |
|
"grad_norm": 2.450379118891722, |
|
"learning_rate": 4.397971701533554e-09, |
|
"loss": 0.3231, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.9900579950289975, |
|
"grad_norm": 2.3274963425198423, |
|
"learning_rate": 3.0123052814812203e-09, |
|
"loss": 0.3284, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.992129246064623, |
|
"grad_norm": 2.4027725490076075, |
|
"learning_rate": 1.887991265486222e-09, |
|
"loss": 0.309, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.9942004971002486, |
|
"grad_norm": 2.5888360240441015, |
|
"learning_rate": 1.025088457409229e-09, |
|
"loss": 0.3177, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.996271748135874, |
|
"grad_norm": 2.5018345143420917, |
|
"learning_rate": 4.2364198878597216e-10, |
|
"loss": 0.3297, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.9983429991714996, |
|
"grad_norm": 2.22608195027372, |
|
"learning_rate": 8.368331646302353e-11, |
|
"loss": 0.3161, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.28806865215301514, |
|
"eval_runtime": 1.191, |
|
"eval_samples_per_second": 2.519, |
|
"eval_steps_per_second": 0.84, |
|
"step": 2414 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 2414, |
|
"total_flos": 252721244405760.0, |
|
"train_loss": 0.5156875643339054, |
|
"train_runtime": 24515.4963, |
|
"train_samples_per_second": 1.575, |
|
"train_steps_per_second": 0.098 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2414, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 252721244405760.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |