|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1334,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0014992503748125937,
      "grad_norm": 0.027869803162182624,
      "learning_rate": 1.4925373134328358e-06,
      "loss": 0.0328,
      "step": 1
    },
    {
      "epoch": 0.0029985007496251873,
      "grad_norm": 0.03034633120050999,
      "learning_rate": 2.9850746268656716e-06,
      "loss": 0.0521,
      "step": 2
    },
    {
      "epoch": 0.004497751124437781,
      "grad_norm": 0.033013192126779396,
      "learning_rate": 4.477611940298508e-06,
      "loss": 0.0481,
      "step": 3
    },
    {
      "epoch": 0.005997001499250375,
      "grad_norm": 0.02964454530876554,
      "learning_rate": 5.970149253731343e-06,
      "loss": 0.0348,
      "step": 4
    },
    {
      "epoch": 0.0074962518740629685,
      "grad_norm": 0.04545013411384094,
      "learning_rate": 7.4626865671641785e-06,
      "loss": 0.0634,
      "step": 5
    },
    {
      "epoch": 0.008995502248875561,
      "grad_norm": 0.037827090237667615,
      "learning_rate": 8.955223880597016e-06,
      "loss": 0.0511,
      "step": 6
    },
    {
      "epoch": 0.010494752623688156,
      "grad_norm": 0.04024496430992649,
      "learning_rate": 1.0447761194029851e-05,
      "loss": 0.0902,
      "step": 7
    },
    {
      "epoch": 0.01199400299850075,
      "grad_norm": 0.02784723970702267,
      "learning_rate": 1.1940298507462686e-05,
      "loss": 0.0346,
      "step": 8
    },
    {
      "epoch": 0.013493253373313344,
      "grad_norm": 0.04073024185229626,
      "learning_rate": 1.3432835820895523e-05,
      "loss": 0.0551,
      "step": 9
    },
    {
      "epoch": 0.014992503748125937,
      "grad_norm": 0.03872731587077291,
      "learning_rate": 1.4925373134328357e-05,
      "loss": 0.0457,
      "step": 10
    },
    {
      "epoch": 0.01649175412293853,
      "grad_norm": 0.036983272179927376,
      "learning_rate": 1.6417910447761194e-05,
      "loss": 0.0466,
      "step": 11
    },
    {
      "epoch": 0.017991004497751123,
      "grad_norm": 0.03450782898126308,
      "learning_rate": 1.791044776119403e-05,
      "loss": 0.0402,
      "step": 12
    },
    {
      "epoch": 0.019490254872563718,
      "grad_norm": 0.0392817087888048,
      "learning_rate": 1.9402985074626868e-05,
      "loss": 0.0679,
      "step": 13
    },
    {
      "epoch": 0.020989505247376312,
      "grad_norm": 0.044590345998053095,
      "learning_rate": 2.0895522388059702e-05,
      "loss": 0.052,
      "step": 14
    },
    {
      "epoch": 0.022488755622188907,
      "grad_norm": 0.0356527349265605,
      "learning_rate": 2.238805970149254e-05,
      "loss": 0.0468,
      "step": 15
    },
    {
      "epoch": 0.0239880059970015,
      "grad_norm": 0.03207250371592397,
      "learning_rate": 2.3880597014925373e-05,
      "loss": 0.037,
      "step": 16
    },
    {
      "epoch": 0.025487256371814093,
      "grad_norm": 0.03631797856767525,
      "learning_rate": 2.537313432835821e-05,
      "loss": 0.0606,
      "step": 17
    },
    {
      "epoch": 0.026986506746626688,
      "grad_norm": 0.042515444732217036,
      "learning_rate": 2.6865671641791047e-05,
      "loss": 0.0527,
      "step": 18
    },
    {
      "epoch": 0.02848575712143928,
      "grad_norm": 0.03859829754974758,
      "learning_rate": 2.835820895522388e-05,
      "loss": 0.0577,
      "step": 19
    },
    {
      "epoch": 0.029985007496251874,
      "grad_norm": 0.04677384374732077,
      "learning_rate": 2.9850746268656714e-05,
      "loss": 0.056,
      "step": 20
    },
    {
      "epoch": 0.031484257871064465,
      "grad_norm": 0.04751009104400587,
      "learning_rate": 3.1343283582089554e-05,
      "loss": 0.0623,
      "step": 21
    },
    {
      "epoch": 0.03298350824587706,
      "grad_norm": 0.029581915526506828,
      "learning_rate": 3.283582089552239e-05,
      "loss": 0.0468,
      "step": 22
    },
    {
      "epoch": 0.034482758620689655,
      "grad_norm": 0.03579505353381419,
      "learning_rate": 3.432835820895522e-05,
      "loss": 0.0587,
      "step": 23
    },
    {
      "epoch": 0.035982008995502246,
      "grad_norm": 0.04751415291633006,
      "learning_rate": 3.582089552238806e-05,
      "loss": 0.1013,
      "step": 24
    },
    {
      "epoch": 0.037481259370314844,
      "grad_norm": 0.04449100113636761,
      "learning_rate": 3.73134328358209e-05,
      "loss": 0.0661,
      "step": 25
    },
    {
      "epoch": 0.038980509745127435,
      "grad_norm": 0.042733677260132046,
      "learning_rate": 3.8805970149253736e-05,
      "loss": 0.0885,
      "step": 26
    },
    {
      "epoch": 0.04047976011994003,
      "grad_norm": 0.02626605876758245,
      "learning_rate": 4.029850746268657e-05,
      "loss": 0.0423,
      "step": 27
    },
    {
      "epoch": 0.041979010494752625,
      "grad_norm": 0.03414998677776291,
      "learning_rate": 4.1791044776119404e-05,
      "loss": 0.0484,
      "step": 28
    },
    {
      "epoch": 0.043478260869565216,
      "grad_norm": 0.023465307923170507,
      "learning_rate": 4.328358208955224e-05,
      "loss": 0.0249,
      "step": 29
    },
    {
      "epoch": 0.044977511244377814,
      "grad_norm": 0.03238010472154882,
      "learning_rate": 4.477611940298508e-05,
      "loss": 0.0544,
      "step": 30
    },
    {
      "epoch": 0.046476761619190406,
      "grad_norm": 0.031312824540825665,
      "learning_rate": 4.626865671641791e-05,
      "loss": 0.046,
      "step": 31
    },
    {
      "epoch": 0.047976011994003,
      "grad_norm": 0.03219959967609331,
      "learning_rate": 4.7761194029850745e-05,
      "loss": 0.058,
      "step": 32
    },
    {
      "epoch": 0.049475262368815595,
      "grad_norm": 0.05253387545719208,
      "learning_rate": 4.9253731343283586e-05,
      "loss": 0.0935,
      "step": 33
    },
    {
      "epoch": 0.050974512743628186,
      "grad_norm": 0.04906400811846751,
      "learning_rate": 5.074626865671642e-05,
      "loss": 0.0592,
      "step": 34
    },
    {
      "epoch": 0.05247376311844078,
      "grad_norm": 0.028913111785186865,
      "learning_rate": 5.223880597014925e-05,
      "loss": 0.0422,
      "step": 35
    },
    {
      "epoch": 0.053973013493253376,
      "grad_norm": 0.036628261788675495,
      "learning_rate": 5.373134328358209e-05,
      "loss": 0.056,
      "step": 36
    },
    {
      "epoch": 0.05547226386806597,
      "grad_norm": 0.03723553635398014,
      "learning_rate": 5.5223880597014934e-05,
      "loss": 0.0514,
      "step": 37
    },
    {
      "epoch": 0.05697151424287856,
      "grad_norm": 0.030481090328732696,
      "learning_rate": 5.671641791044776e-05,
      "loss": 0.0505,
      "step": 38
    },
    {
      "epoch": 0.05847076461769116,
      "grad_norm": 0.06319461018053213,
      "learning_rate": 5.82089552238806e-05,
      "loss": 0.0932,
      "step": 39
    },
    {
      "epoch": 0.05997001499250375,
      "grad_norm": 0.04375744045965157,
      "learning_rate": 5.970149253731343e-05,
      "loss": 0.052,
      "step": 40
    },
    {
      "epoch": 0.06146926536731634,
      "grad_norm": 0.02716485170444979,
      "learning_rate": 6.119402985074628e-05,
      "loss": 0.0394,
      "step": 41
    },
    {
      "epoch": 0.06296851574212893,
      "grad_norm": 0.03380373227357743,
      "learning_rate": 6.268656716417911e-05,
      "loss": 0.0444,
      "step": 42
    },
    {
      "epoch": 0.06446776611694154,
      "grad_norm": 0.023682114706538047,
      "learning_rate": 6.417910447761194e-05,
      "loss": 0.0318,
      "step": 43
    },
    {
      "epoch": 0.06596701649175413,
      "grad_norm": 0.03717832489837232,
      "learning_rate": 6.567164179104478e-05,
      "loss": 0.0542,
      "step": 44
    },
    {
      "epoch": 0.06746626686656672,
      "grad_norm": 0.03849400793350328,
      "learning_rate": 6.716417910447762e-05,
      "loss": 0.066,
      "step": 45
    },
    {
      "epoch": 0.06896551724137931,
      "grad_norm": 0.039557900524499395,
      "learning_rate": 6.865671641791044e-05,
      "loss": 0.0599,
      "step": 46
    },
    {
      "epoch": 0.0704647676161919,
      "grad_norm": 0.04183512559026415,
      "learning_rate": 7.014925373134329e-05,
      "loss": 0.0559,
      "step": 47
    },
    {
      "epoch": 0.07196401799100449,
      "grad_norm": 0.023819063243281605,
      "learning_rate": 7.164179104477612e-05,
      "loss": 0.0334,
      "step": 48
    },
    {
      "epoch": 0.0734632683658171,
      "grad_norm": 0.036181513868464375,
      "learning_rate": 7.313432835820896e-05,
      "loss": 0.053,
      "step": 49
    },
    {
      "epoch": 0.07496251874062969,
      "grad_norm": 0.038961606945728214,
      "learning_rate": 7.46268656716418e-05,
      "loss": 0.0469,
      "step": 50
    },
    {
      "epoch": 0.07646176911544228,
      "grad_norm": 0.047892810949924564,
      "learning_rate": 7.611940298507463e-05,
      "loss": 0.0585,
      "step": 51
    },
    {
      "epoch": 0.07796101949025487,
      "grad_norm": 0.06267828987629875,
      "learning_rate": 7.761194029850747e-05,
      "loss": 0.0671,
      "step": 52
    },
    {
      "epoch": 0.07946026986506746,
      "grad_norm": 0.027197819167560006,
      "learning_rate": 7.910447761194029e-05,
      "loss": 0.0384,
      "step": 53
    },
    {
      "epoch": 0.08095952023988005,
      "grad_norm": 0.062135219556912424,
      "learning_rate": 8.059701492537314e-05,
      "loss": 0.0724,
      "step": 54
    },
    {
      "epoch": 0.08245877061469266,
      "grad_norm": 0.04159649809292002,
      "learning_rate": 8.208955223880597e-05,
      "loss": 0.0528,
      "step": 55
    },
    {
      "epoch": 0.08395802098950525,
      "grad_norm": 0.04806451958909231,
      "learning_rate": 8.358208955223881e-05,
      "loss": 0.0522,
      "step": 56
    },
    {
      "epoch": 0.08545727136431784,
      "grad_norm": 0.0328648844097681,
      "learning_rate": 8.507462686567164e-05,
      "loss": 0.0529,
      "step": 57
    },
    {
      "epoch": 0.08695652173913043,
      "grad_norm": 0.03499304511129161,
      "learning_rate": 8.656716417910447e-05,
      "loss": 0.0457,
      "step": 58
    },
    {
      "epoch": 0.08845577211394302,
      "grad_norm": 0.03632609832686457,
      "learning_rate": 8.805970149253732e-05,
      "loss": 0.0511,
      "step": 59
    },
    {
      "epoch": 0.08995502248875563,
      "grad_norm": 0.03286113779513323,
      "learning_rate": 8.955223880597016e-05,
      "loss": 0.0605,
      "step": 60
    },
    {
      "epoch": 0.09145427286356822,
      "grad_norm": 0.05626334745781654,
      "learning_rate": 9.104477611940299e-05,
      "loss": 0.0591,
      "step": 61
    },
    {
      "epoch": 0.09295352323838081,
      "grad_norm": 0.04452407950397946,
      "learning_rate": 9.253731343283582e-05,
      "loss": 0.0769,
      "step": 62
    },
    {
      "epoch": 0.0944527736131934,
      "grad_norm": 0.03625308210216529,
      "learning_rate": 9.402985074626867e-05,
      "loss": 0.0468,
      "step": 63
    },
    {
      "epoch": 0.095952023988006,
      "grad_norm": 0.029791030013327606,
      "learning_rate": 9.552238805970149e-05,
      "loss": 0.0584,
      "step": 64
    },
    {
      "epoch": 0.09745127436281859,
      "grad_norm": 0.03780831906836486,
      "learning_rate": 9.701492537313434e-05,
      "loss": 0.0333,
      "step": 65
    },
    {
      "epoch": 0.09895052473763119,
      "grad_norm": 0.032397150776764055,
      "learning_rate": 9.850746268656717e-05,
      "loss": 0.0447,
      "step": 66
    },
    {
      "epoch": 0.10044977511244378,
      "grad_norm": 0.03634540437911119,
      "learning_rate": 0.0001,
      "loss": 0.0602,
      "step": 67
    },
    {
      "epoch": 0.10194902548725637,
      "grad_norm": 0.025903306639537493,
      "learning_rate": 0.00010149253731343284,
      "loss": 0.0334,
      "step": 68
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 0.05692249372761759,
      "learning_rate": 0.00010298507462686569,
      "loss": 0.0897,
      "step": 69
    },
    {
      "epoch": 0.10494752623688156,
      "grad_norm": 0.04193568812333293,
      "learning_rate": 0.0001044776119402985,
      "loss": 0.035,
      "step": 70
    },
    {
      "epoch": 0.10644677661169415,
      "grad_norm": 0.03978190062644848,
      "learning_rate": 0.00010597014925373134,
      "loss": 0.0442,
      "step": 71
    },
    {
      "epoch": 0.10794602698650675,
      "grad_norm": 0.041177480827902055,
      "learning_rate": 0.00010746268656716419,
      "loss": 0.0695,
      "step": 72
    },
    {
      "epoch": 0.10944527736131934,
      "grad_norm": 0.044042393308047885,
      "learning_rate": 0.00010895522388059702,
      "loss": 0.061,
      "step": 73
    },
    {
      "epoch": 0.11094452773613193,
      "grad_norm": 0.027180434415964242,
      "learning_rate": 0.00011044776119402987,
      "loss": 0.0316,
      "step": 74
    },
    {
      "epoch": 0.11244377811094453,
      "grad_norm": 0.03300393578125543,
      "learning_rate": 0.00011194029850746269,
      "loss": 0.0587,
      "step": 75
    },
    {
      "epoch": 0.11394302848575712,
      "grad_norm": 0.028528364848481542,
      "learning_rate": 0.00011343283582089552,
      "loss": 0.0352,
      "step": 76
    },
    {
      "epoch": 0.11544227886056972,
      "grad_norm": 0.030828012521923,
      "learning_rate": 0.00011492537313432837,
      "loss": 0.0481,
      "step": 77
    },
    {
      "epoch": 0.11694152923538231,
      "grad_norm": 0.04083313566006794,
      "learning_rate": 0.0001164179104477612,
      "loss": 0.0604,
      "step": 78
    },
    {
      "epoch": 0.1184407796101949,
      "grad_norm": 0.025320856260068247,
      "learning_rate": 0.00011791044776119405,
      "loss": 0.0425,
      "step": 79
    },
    {
      "epoch": 0.1199400299850075,
      "grad_norm": 0.025013025974113073,
      "learning_rate": 0.00011940298507462686,
      "loss": 0.0485,
      "step": 80
    },
    {
      "epoch": 0.12143928035982009,
      "grad_norm": 0.02829433473565749,
      "learning_rate": 0.0001208955223880597,
      "loss": 0.0475,
      "step": 81
    },
    {
      "epoch": 0.12293853073463268,
      "grad_norm": 0.04936619613683576,
      "learning_rate": 0.00012238805970149255,
      "loss": 0.0766,
      "step": 82
    },
    {
      "epoch": 0.12443778110944528,
      "grad_norm": 0.06517476388380389,
      "learning_rate": 0.00012388059701492538,
      "loss": 0.0801,
      "step": 83
    },
    {
      "epoch": 0.12593703148425786,
      "grad_norm": 0.045016042881905784,
      "learning_rate": 0.00012537313432835822,
      "loss": 0.051,
      "step": 84
    },
    {
      "epoch": 0.12743628185907047,
      "grad_norm": 0.03884803395791614,
      "learning_rate": 0.00012686567164179105,
      "loss": 0.0594,
      "step": 85
    },
    {
      "epoch": 0.12893553223388307,
      "grad_norm": 0.028365426279383703,
      "learning_rate": 0.00012835820895522389,
      "loss": 0.0436,
      "step": 86
    },
    {
      "epoch": 0.13043478260869565,
      "grad_norm": 0.030909522037005376,
      "learning_rate": 0.00012985074626865672,
      "loss": 0.0348,
      "step": 87
    },
    {
      "epoch": 0.13193403298350825,
      "grad_norm": 0.0651832129849852,
      "learning_rate": 0.00013134328358208955,
      "loss": 0.0687,
      "step": 88
    },
    {
      "epoch": 0.13343328335832083,
      "grad_norm": 0.043557085072315455,
      "learning_rate": 0.0001328358208955224,
      "loss": 0.0473,
      "step": 89
    },
    {
      "epoch": 0.13493253373313344,
      "grad_norm": 0.028845028778833763,
      "learning_rate": 0.00013432835820895525,
      "loss": 0.0387,
      "step": 90
    },
    {
      "epoch": 0.136431784107946,
      "grad_norm": 0.04331404459497082,
      "learning_rate": 0.00013582089552238805,
      "loss": 0.0619,
      "step": 91
    },
    {
      "epoch": 0.13793103448275862,
      "grad_norm": 0.03133711164003795,
      "learning_rate": 0.0001373134328358209,
      "loss": 0.0315,
      "step": 92
    },
    {
      "epoch": 0.13943028485757122,
      "grad_norm": 0.04502206273984752,
      "learning_rate": 0.00013880597014925375,
      "loss": 0.0575,
      "step": 93
    },
    {
      "epoch": 0.1409295352323838,
      "grad_norm": 0.032917699035736186,
      "learning_rate": 0.00014029850746268658,
      "loss": 0.0398,
      "step": 94
    },
    {
      "epoch": 0.1424287856071964,
      "grad_norm": 0.04144739884037838,
      "learning_rate": 0.00014179104477611942,
      "loss": 0.0569,
      "step": 95
    },
    {
      "epoch": 0.14392803598200898,
      "grad_norm": 0.03128670188478762,
      "learning_rate": 0.00014328358208955225,
      "loss": 0.039,
      "step": 96
    },
    {
      "epoch": 0.1454272863568216,
      "grad_norm": 0.044978141143991145,
      "learning_rate": 0.00014477611940298508,
      "loss": 0.0607,
      "step": 97
    },
    {
      "epoch": 0.1469265367316342,
      "grad_norm": 0.049596838088832966,
      "learning_rate": 0.00014626865671641792,
      "loss": 0.053,
      "step": 98
    },
    {
      "epoch": 0.14842578710644677,
      "grad_norm": 0.03497503808856175,
      "learning_rate": 0.00014776119402985075,
      "loss": 0.0513,
      "step": 99
    },
    {
      "epoch": 0.14992503748125938,
      "grad_norm": 0.03494679451699245,
      "learning_rate": 0.0001492537313432836,
      "loss": 0.032,
      "step": 100
    },
    {
      "epoch": 0.15142428785607195,
      "grad_norm": 0.0572890539993266,
      "learning_rate": 0.00015074626865671642,
      "loss": 0.0665,
      "step": 101
    },
    {
      "epoch": 0.15292353823088456,
      "grad_norm": 0.03898694652987806,
      "learning_rate": 0.00015223880597014925,
      "loss": 0.0374,
      "step": 102
    },
    {
      "epoch": 0.15442278860569716,
      "grad_norm": 0.04199773920037622,
      "learning_rate": 0.00015373134328358208,
      "loss": 0.0461,
      "step": 103
    },
    {
      "epoch": 0.15592203898050974,
      "grad_norm": 0.057829415794355014,
      "learning_rate": 0.00015522388059701495,
      "loss": 0.0755,
      "step": 104
    },
    {
      "epoch": 0.15742128935532235,
      "grad_norm": 0.050148540491503274,
      "learning_rate": 0.00015671641791044778,
      "loss": 0.0507,
      "step": 105
    },
    {
      "epoch": 0.15892053973013492,
      "grad_norm": 0.04386621541760065,
      "learning_rate": 0.00015820895522388059,
      "loss": 0.0452,
      "step": 106
    },
    {
      "epoch": 0.16041979010494753,
      "grad_norm": 0.03554877367553605,
      "learning_rate": 0.00015970149253731345,
      "loss": 0.0428,
      "step": 107
    },
    {
      "epoch": 0.1619190404797601,
      "grad_norm": 0.11161835133361775,
      "learning_rate": 0.00016119402985074628,
      "loss": 0.0681,
      "step": 108
    },
    {
      "epoch": 0.1634182908545727,
      "grad_norm": 0.04645296691253517,
      "learning_rate": 0.00016268656716417911,
      "loss": 0.0538,
      "step": 109
    },
    {
      "epoch": 0.16491754122938532,
      "grad_norm": 0.02803028019747501,
      "learning_rate": 0.00016417910447761195,
      "loss": 0.0272,
      "step": 110
    },
    {
      "epoch": 0.1664167916041979,
      "grad_norm": 0.03415893742686059,
      "learning_rate": 0.00016567164179104478,
      "loss": 0.0254,
      "step": 111
    },
    {
      "epoch": 0.1679160419790105,
      "grad_norm": 0.05809631567240609,
      "learning_rate": 0.00016716417910447761,
      "loss": 0.0611,
      "step": 112
    },
    {
      "epoch": 0.16941529235382308,
      "grad_norm": 0.05603865215472538,
      "learning_rate": 0.00016865671641791045,
      "loss": 0.0394,
      "step": 113
    },
    {
      "epoch": 0.17091454272863568,
      "grad_norm": 0.045375774062388276,
      "learning_rate": 0.00017014925373134328,
      "loss": 0.0538,
      "step": 114
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 0.03960126146668028,
      "learning_rate": 0.00017164179104477614,
      "loss": 0.0423,
      "step": 115
    },
    {
      "epoch": 0.17391304347826086,
      "grad_norm": 0.037163479499863115,
      "learning_rate": 0.00017313432835820895,
      "loss": 0.0481,
      "step": 116
    },
    {
      "epoch": 0.17541229385307347,
      "grad_norm": 0.039738692107982816,
      "learning_rate": 0.00017462686567164178,
      "loss": 0.0345,
      "step": 117
    },
    {
      "epoch": 0.17691154422788605,
      "grad_norm": 0.03833182770101284,
      "learning_rate": 0.00017611940298507464,
      "loss": 0.0402,
      "step": 118
    },
    {
      "epoch": 0.17841079460269865,
      "grad_norm": 0.047548298417749796,
      "learning_rate": 0.00017761194029850748,
      "loss": 0.0542,
      "step": 119
    },
    {
      "epoch": 0.17991004497751126,
      "grad_norm": 0.033462493688485306,
      "learning_rate": 0.0001791044776119403,
      "loss": 0.0303,
      "step": 120
    },
    {
      "epoch": 0.18140929535232383,
      "grad_norm": 0.08098170529150021,
      "learning_rate": 0.00018059701492537314,
      "loss": 0.0802,
      "step": 121
    },
    {
      "epoch": 0.18290854572713644,
      "grad_norm": 0.031967576513773445,
      "learning_rate": 0.00018208955223880598,
      "loss": 0.0291,
      "step": 122
    },
    {
      "epoch": 0.18440779610194902,
      "grad_norm": 0.07620934156104307,
      "learning_rate": 0.0001835820895522388,
      "loss": 0.0737,
      "step": 123
    },
    {
      "epoch": 0.18590704647676162,
      "grad_norm": 0.05731449355361744,
      "learning_rate": 0.00018507462686567165,
      "loss": 0.0504,
      "step": 124
    },
    {
      "epoch": 0.1874062968515742,
      "grad_norm": 0.024809079949469532,
      "learning_rate": 0.00018656716417910448,
      "loss": 0.0302,
      "step": 125
    },
    {
      "epoch": 0.1889055472263868,
      "grad_norm": 0.03621287076524117,
      "learning_rate": 0.00018805970149253734,
      "loss": 0.0557,
      "step": 126
    },
    {
      "epoch": 0.1904047976011994,
      "grad_norm": 0.0495647456653575,
      "learning_rate": 0.00018955223880597015,
      "loss": 0.0547,
      "step": 127
    },
    {
      "epoch": 0.191904047976012,
      "grad_norm": 0.03485762608266449,
      "learning_rate": 0.00019104477611940298,
      "loss": 0.041,
      "step": 128
    },
    {
      "epoch": 0.1934032983508246,
      "grad_norm": 0.059266654938214335,
      "learning_rate": 0.00019253731343283584,
      "loss": 0.0909,
      "step": 129
    },
    {
      "epoch": 0.19490254872563717,
      "grad_norm": 0.08172903239953559,
      "learning_rate": 0.00019402985074626867,
      "loss": 0.1242,
      "step": 130
    },
    {
      "epoch": 0.19640179910044978,
      "grad_norm": 0.05505103331536185,
      "learning_rate": 0.0001955223880597015,
      "loss": 0.08,
      "step": 131
    },
    {
      "epoch": 0.19790104947526238,
      "grad_norm": 0.03964613621186028,
      "learning_rate": 0.00019701492537313434,
      "loss": 0.0465,
      "step": 132
    },
    {
      "epoch": 0.19940029985007496,
      "grad_norm": 0.04992604106896031,
      "learning_rate": 0.00019850746268656718,
      "loss": 0.0503,
      "step": 133
    },
    {
      "epoch": 0.20089955022488756,
      "grad_norm": 0.049289471075707976,
      "learning_rate": 0.0002,
      "loss": 0.0636,
      "step": 134
    },
    {
      "epoch": 0.20239880059970014,
      "grad_norm": 0.07369356962323476,
      "learning_rate": 0.00019999965730559848,
      "loss": 0.0732,
      "step": 135
    },
    {
      "epoch": 0.20389805097451275,
      "grad_norm": 0.047181541580997785,
      "learning_rate": 0.00019999862922474268,
      "loss": 0.0606,
      "step": 136
    },
    {
      "epoch": 0.20539730134932535,
      "grad_norm": 0.03887614440789255,
      "learning_rate": 0.00019999691576447898,
      "loss": 0.036,
      "step": 137
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 0.05259010448351047,
      "learning_rate": 0.00019999451693655123,
      "loss": 0.0631,
      "step": 138
    },
    {
      "epoch": 0.20839580209895053,
      "grad_norm": 0.04113206912303282,
      "learning_rate": 0.00019999143275740072,
      "loss": 0.052,
      "step": 139
    },
    {
      "epoch": 0.2098950524737631,
      "grad_norm": 0.04177105607090762,
      "learning_rate": 0.00019998766324816607,
      "loss": 0.0404,
      "step": 140
    },
    {
      "epoch": 0.21139430284857572,
      "grad_norm": 0.08334534578136313,
      "learning_rate": 0.0001999832084346831,
      "loss": 0.0897,
      "step": 141
    },
    {
      "epoch": 0.2128935532233883,
      "grad_norm": 0.04268046411364645,
      "learning_rate": 0.00019997806834748456,
      "loss": 0.0427,
      "step": 142
    },
    {
      "epoch": 0.2143928035982009,
      "grad_norm": 0.03598851664774015,
      "learning_rate": 0.00019997224302180006,
      "loss": 0.0302,
      "step": 143
    },
    {
      "epoch": 0.2158920539730135,
      "grad_norm": 0.05555632220145266,
      "learning_rate": 0.00019996573249755572,
      "loss": 0.0564,
      "step": 144
    },
    {
      "epoch": 0.21739130434782608,
      "grad_norm": 0.06965699310085238,
      "learning_rate": 0.00019995853681937397,
      "loss": 0.071,
      "step": 145
    },
    {
      "epoch": 0.21889055472263869,
      "grad_norm": 0.07939241103941817,
      "learning_rate": 0.00019995065603657316,
      "loss": 0.0907,
      "step": 146
    },
    {
      "epoch": 0.22038980509745126,
      "grad_norm": 0.050804495965929154,
      "learning_rate": 0.0001999420902031673,
      "loss": 0.048,
      "step": 147
    },
    {
      "epoch": 0.22188905547226387,
      "grad_norm": 0.0590827091873444,
      "learning_rate": 0.00019993283937786563,
      "loss": 0.0705,
      "step": 148
    },
    {
      "epoch": 0.22338830584707647,
      "grad_norm": 0.06023624941065103,
      "learning_rate": 0.0001999229036240723,
      "loss": 0.0527,
      "step": 149
    },
    {
      "epoch": 0.22488755622188905,
      "grad_norm": 0.03486164684165847,
      "learning_rate": 0.00019991228300988585,
      "loss": 0.0442,
      "step": 150
    },
    {
      "epoch": 0.22638680659670166,
      "grad_norm": 0.051399897395934625,
      "learning_rate": 0.00019990097760809876,
      "loss": 0.0975,
      "step": 151
    },
    {
      "epoch": 0.22788605697151423,
      "grad_norm": 0.03483187240018451,
      "learning_rate": 0.00019988898749619702,
      "loss": 0.0366,
      "step": 152
    },
    {
      "epoch": 0.22938530734632684,
      "grad_norm": 0.10145745147250426,
      "learning_rate": 0.00019987631275635948,
      "loss": 0.0709,
      "step": 153
    },
    {
      "epoch": 0.23088455772113944,
      "grad_norm": 0.0738726285389296,
      "learning_rate": 0.0001998629534754574,
      "loss": 0.1201,
      "step": 154
    },
    {
      "epoch": 0.23238380809595202,
      "grad_norm": 0.04853210541031046,
      "learning_rate": 0.00019984890974505381,
      "loss": 0.0628,
      "step": 155
    },
    {
      "epoch": 0.23388305847076463,
      "grad_norm": 0.05279238629760989,
      "learning_rate": 0.00019983418166140285,
      "loss": 0.0395,
      "step": 156
    },
    {
      "epoch": 0.2353823088455772,
      "grad_norm": 0.02918089043238743,
      "learning_rate": 0.00019981876932544917,
      "loss": 0.0295,
      "step": 157
    },
    {
      "epoch": 0.2368815592203898,
      "grad_norm": 0.06770346724424872,
      "learning_rate": 0.00019980267284282717,
      "loss": 0.0638,
      "step": 158
    },
    {
      "epoch": 0.2383808095952024,
      "grad_norm": 0.04833446518136561,
      "learning_rate": 0.00019978589232386035,
      "loss": 0.0519,
      "step": 159
    },
    {
      "epoch": 0.239880059970015,
      "grad_norm": 0.05186898701512186,
      "learning_rate": 0.00019976842788356055,
      "loss": 0.0478,
      "step": 160
    },
    {
      "epoch": 0.2413793103448276,
      "grad_norm": 0.03886911667428048,
      "learning_rate": 0.00019975027964162702,
      "loss": 0.0371,
      "step": 161
    },
    {
      "epoch": 0.24287856071964017,
      "grad_norm": 0.05937400672895507,
      "learning_rate": 0.00019973144772244582,
      "loss": 0.0645,
      "step": 162
    },
    {
      "epoch": 0.24437781109445278,
      "grad_norm": 0.046719542118188725,
      "learning_rate": 0.00019971193225508879,
      "loss": 0.0428,
      "step": 163
    },
    {
      "epoch": 0.24587706146926536,
      "grad_norm": 0.06610712938750751,
      "learning_rate": 0.0001996917333733128,
      "loss": 0.0482,
      "step": 164
    },
    {
      "epoch": 0.24737631184407796,
      "grad_norm": 0.050519212755809846,
      "learning_rate": 0.0001996708512155587,
      "loss": 0.0402,
      "step": 165
    },
    {
      "epoch": 0.24887556221889057,
      "grad_norm": 0.047755461648101266,
      "learning_rate": 0.00019964928592495045,
      "loss": 0.048,
      "step": 166
    },
    {
      "epoch": 0.25037481259370314,
      "grad_norm": 0.04539061348669229,
      "learning_rate": 0.00019962703764929413,
      "loss": 0.0539,
      "step": 167
    },
    {
      "epoch": 0.2518740629685157,
      "grad_norm": 0.044079218147863274,
      "learning_rate": 0.00019960410654107697,
      "loss": 0.0338,
      "step": 168
    },
    {
      "epoch": 0.25337331334332835,
      "grad_norm": 0.15940447460493573,
      "learning_rate": 0.0001995804927574662,
      "loss": 0.1907,
      "step": 169
    },
    {
      "epoch": 0.25487256371814093,
      "grad_norm": 0.14142827538567665,
      "learning_rate": 0.00019955619646030802,
      "loss": 0.1205,
      "step": 170
    },
    {
      "epoch": 0.2563718140929535,
      "grad_norm": 0.044572048202989434,
      "learning_rate": 0.00019953121781612657,
      "loss": 0.0398,
      "step": 171
    },
    {
      "epoch": 0.25787106446776614,
      "grad_norm": 0.06581882047357028,
      "learning_rate": 0.00019950555699612267,
      "loss": 0.0598,
      "step": 172
    },
    {
      "epoch": 0.2593703148425787,
      "grad_norm": 0.058819185905422434,
      "learning_rate": 0.00019947921417617267,
      "loss": 0.0356,
      "step": 173
    },
    {
      "epoch": 0.2608695652173913,
      "grad_norm": 0.07245731651154715,
      "learning_rate": 0.00019945218953682734,
      "loss": 0.0554,
      "step": 174
    },
    {
      "epoch": 0.2623688155922039,
      "grad_norm": 0.0365300697607886,
      "learning_rate": 0.00019942448326331055,
      "loss": 0.0274,
      "step": 175
    },
    {
      "epoch": 0.2638680659670165,
      "grad_norm": 0.06298974261981975,
      "learning_rate": 0.000199396095545518,
      "loss": 0.0607,
      "step": 176
    },
    {
      "epoch": 0.2653673163418291,
      "grad_norm": 0.11713127968683351,
      "learning_rate": 0.00019936702657801587,
      "loss": 0.1058,
      "step": 177
    },
    {
      "epoch": 0.26686656671664166,
      "grad_norm": 0.06199101074736703,
      "learning_rate": 0.00019933727656003963,
      "loss": 0.0491,
      "step": 178
    },
    {
      "epoch": 0.2683658170914543,
      "grad_norm": 0.06866049851783687,
      "learning_rate": 0.00019930684569549264,
      "loss": 0.0536,
      "step": 179
    },
    {
      "epoch": 0.2698650674662669,
      "grad_norm": 0.09605253534909008,
      "learning_rate": 0.00019927573419294456,
      "loss": 0.07,
      "step": 180
    },
    {
      "epoch": 0.27136431784107945,
      "grad_norm": 0.08723233109826037,
      "learning_rate": 0.00019924394226563018,
      "loss": 0.0728,
      "step": 181
    },
    {
      "epoch": 0.272863568215892,
      "grad_norm": 0.08669179137921693,
      "learning_rate": 0.0001992114701314478,
      "loss": 0.0691,
      "step": 182
    },
    {
      "epoch": 0.27436281859070466,
      "grad_norm": 0.05913118898122669,
      "learning_rate": 0.0001991783180129578,
      "loss": 0.066,
      "step": 183
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 0.051263995779607295,
      "learning_rate": 0.00019914448613738106,
      "loss": 0.0404,
      "step": 184
    },
    {
      "epoch": 0.2773613193403298,
      "grad_norm": 0.05477400514379348,
      "learning_rate": 0.0001991099747365975,
      "loss": 0.0427,
      "step": 185
    },
    {
      "epoch": 0.27886056971514245,
      "grad_norm": 0.06920109883069289,
      "learning_rate": 0.00019907478404714436,
      "loss": 0.0474,
      "step": 186
    },
    {
      "epoch": 0.280359820089955,
      "grad_norm": 0.03826038209148185,
      "learning_rate": 0.00019903891431021477,
      "loss": 0.0419,
      "step": 187
    },
    {
      "epoch": 0.2818590704647676,
      "grad_norm": 0.10991221849444072,
      "learning_rate": 0.00019900236577165576,
      "loss": 0.0858,
      "step": 188
    },
    {
      "epoch": 0.28335832083958024,
      "grad_norm": 0.031681176003979745,
      "learning_rate": 0.00019896513868196704,
      "loss": 0.035,
      "step": 189
    },
    {
      "epoch": 0.2848575712143928,
      "grad_norm": 0.0519531999433836,
      "learning_rate": 0.00019892723329629887,
      "loss": 0.0464,
      "step": 190
    },
    {
      "epoch": 0.2863568215892054,
      "grad_norm": 0.05571628339788405,
      "learning_rate": 0.0001988886498744505,
      "loss": 0.0518,
      "step": 191
    },
    {
      "epoch": 0.28785607196401797,
      "grad_norm": 0.06914430596733225,
      "learning_rate": 0.00019884938868086835,
      "loss": 0.0865,
      "step": 192
    },
    {
      "epoch": 0.2893553223388306,
      "grad_norm": 0.07723650037620701,
      "learning_rate": 0.00019880944998464434,
      "loss": 0.07,
      "step": 193
    },
    {
      "epoch": 0.2908545727136432,
      "grad_norm": 0.03868436237329038,
      "learning_rate": 0.00019876883405951377,
      "loss": 0.0405,
      "step": 194
    },
    {
      "epoch": 0.29235382308845576,
      "grad_norm": 0.03616727905905788,
      "learning_rate": 0.00019872754118385367,
      "loss": 0.0353,
      "step": 195
    },
    {
      "epoch": 0.2938530734632684,
      "grad_norm": 0.06407436416080277,
      "learning_rate": 0.00019868557164068074,
      "loss": 0.0518,
      "step": 196
    },
    {
      "epoch": 0.29535232383808097,
      "grad_norm": 0.0885276429432359,
      "learning_rate": 0.00019864292571764955,
      "loss": 0.071,
      "step": 197
    },
    {
      "epoch": 0.29685157421289354,
      "grad_norm": 0.06517887639083755,
      "learning_rate": 0.0001985996037070505,
      "loss": 0.0472,
      "step": 198
    },
    {
      "epoch": 0.2983508245877061,
      "grad_norm": 0.03262350195450329,
      "learning_rate": 0.00019855560590580778,
      "loss": 0.0357,
      "step": 199
    },
    {
      "epoch": 0.29985007496251875,
      "grad_norm": 0.06352918641533918,
      "learning_rate": 0.0001985109326154774,
      "loss": 0.0636,
      "step": 200
    },
    {
      "epoch": 0.30134932533733133,
      "grad_norm": 0.06275349247389327,
      "learning_rate": 0.0001984655841422451,
      "loss": 0.0746,
      "step": 201
    },
    {
      "epoch": 0.3028485757121439,
      "grad_norm": 0.04177370401732468,
      "learning_rate": 0.0001984195607969242,
      "loss": 0.0483,
      "step": 202
    },
    {
      "epoch": 0.30434782608695654,
      "grad_norm": 0.09421608937730566,
      "learning_rate": 0.00019837286289495361,
      "loss": 0.0731,
      "step": 203
    },
    {
      "epoch": 0.3058470764617691,
      "grad_norm": 0.06309582839396509,
      "learning_rate": 0.0001983254907563955,
      "loss": 0.0806,
      "step": 204
    },
    {
      "epoch": 0.3073463268365817,
      "grad_norm": 0.043040241993361705,
      "learning_rate": 0.00019827744470593314,
      "loss": 0.042,
      "step": 205
    },
    {
      "epoch": 0.30884557721139433,
      "grad_norm": 0.07592171510511146,
      "learning_rate": 0.0001982287250728689,
      "loss": 0.0672,
      "step": 206
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 0.04998022421825792,
      "learning_rate": 0.00019817933219112158,
      "loss": 0.0598,
      "step": 207
    },
    {
      "epoch": 0.3118440779610195,
      "grad_norm": 0.06445241515366612,
      "learning_rate": 0.0001981292663992245,
      "loss": 0.042,
      "step": 208
    },
    {
      "epoch": 0.31334332833583206,
      "grad_norm": 0.05511385292159757,
      "learning_rate": 0.00019807852804032305,
      "loss": 0.0679,
      "step": 209
    },
    {
      "epoch": 0.3148425787106447,
      "grad_norm": 0.10911657904245481,
      "learning_rate": 0.00019802711746217218,
      "loss": 0.0992,
      "step": 210
    },
    {
      "epoch": 0.31634182908545727,
      "grad_norm": 0.04607736230949591,
      "learning_rate": 0.00019797503501713427,
      "loss": 0.0386,
      "step": 211
    },
    {
      "epoch": 0.31784107946026985,
      "grad_norm": 0.055336925627864554,
      "learning_rate": 0.00019792228106217658,
      "loss": 0.0482,
      "step": 212
    },
    {
      "epoch": 0.3193403298350825,
      "grad_norm": 0.0367000018321132,
      "learning_rate": 0.0001978688559588688,
      "loss": 0.0468,
      "step": 213
    },
    {
      "epoch": 0.32083958020989506,
      "grad_norm": 0.03475056457429854,
      "learning_rate": 0.00019781476007338058,
      "loss": 0.0303,
      "step": 214
    },
    {
      "epoch": 0.32233883058470764,
      "grad_norm": 0.06222566098960902,
      "learning_rate": 0.0001977599937764791,
      "loss": 0.0643,
      "step": 215
    },
    {
      "epoch": 0.3238380809595202,
      "grad_norm": 0.08945096189461864,
      "learning_rate": 0.0001977045574435264,
      "loss": 0.1043,
      "step": 216
    },
    {
      "epoch": 0.32533733133433285,
      "grad_norm": 0.056441998030072674,
      "learning_rate": 0.00019764845145447689,
      "loss": 0.0486,
      "step": 217
    },
    {
      "epoch": 0.3268365817091454,
      "grad_norm": 0.062483693636514896,
      "learning_rate": 0.00019759167619387476,
      "loss": 0.0606,
      "step": 218
    },
    {
      "epoch": 0.328335832083958,
      "grad_norm": 0.11698538531516464,
      "learning_rate": 0.00019753423205085127,
      "loss": 0.0507,
      "step": 219
    },
    {
      "epoch": 0.32983508245877063,
      "grad_norm": 0.07112373998312807,
      "learning_rate": 0.0001974761194191222,
      "loss": 0.0504,
      "step": 220
    },
    {
      "epoch": 0.3313343328335832,
      "grad_norm": 0.04204879182316876,
      "learning_rate": 0.00019741733869698495,
      "loss": 0.0457,
      "step": 221
    },
    {
      "epoch": 0.3328335832083958,
      "grad_norm": 0.047081193320726994,
      "learning_rate": 0.00019735789028731604,
      "loss": 0.0562,
      "step": 222
    },
    {
      "epoch": 0.3343328335832084,
      "grad_norm": 0.046700620652771065,
      "learning_rate": 0.0001972977745975682,
      "loss": 0.0387,
      "step": 223
    },
    {
      "epoch": 0.335832083958021,
      "grad_norm": 0.05658161180730498,
      "learning_rate": 0.00019723699203976766,
      "loss": 0.075,
      "step": 224
    },
    {
      "epoch": 0.3373313343328336,
      "grad_norm": 0.04886519491828949,
      "learning_rate": 0.00019717554303051127,
      "loss": 0.0438,
      "step": 225
    },
    {
      "epoch": 0.33883058470764615,
      "grad_norm": 0.061882324104049265,
      "learning_rate": 0.00019711342799096361,
      "loss": 0.107,
      "step": 226
    },
    {
      "epoch": 0.3403298350824588,
      "grad_norm": 0.05128064620446334,
      "learning_rate": 0.00019705064734685425,
      "loss": 0.0499,
      "step": 227
    },
    {
      "epoch": 0.34182908545727136,
      "grad_norm": 0.037511885925608925,
      "learning_rate": 0.00019698720152847468,
      "loss": 0.0354,
      "step": 228
    },
    {
      "epoch": 0.34332833583208394,
      "grad_norm": 0.054518869510984926,
      "learning_rate": 0.00019692309097067546,
      "loss": 0.0623,
      "step": 229
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 0.04111726041766873,
      "learning_rate": 0.0001968583161128631,
      "loss": 0.0399,
      "step": 230
    },
    {
      "epoch": 0.34632683658170915,
      "grad_norm": 0.05016557394284265,
      "learning_rate": 0.0001967928773989973,
      "loss": 0.0399,
      "step": 231
    },
    {
      "epoch": 0.34782608695652173,
      "grad_norm": 0.054953298674171765,
      "learning_rate": 0.0001967267752775877,
      "loss": 0.0453,
      "step": 232
    },
    {
      "epoch": 0.3493253373313343,
      "grad_norm": 0.04128614741818805,
      "learning_rate": 0.00019666001020169073,
      "loss": 0.0437,
      "step": 233
    },
    {
      "epoch": 0.35082458770614694,
      "grad_norm": 0.0711824564667931,
      "learning_rate": 0.00019659258262890683,
      "loss": 0.0849,
      "step": 234
    },
    {
      "epoch": 0.3523238380809595,
      "grad_norm": 0.042020248359914844,
      "learning_rate": 0.00019652449302137704,
      "loss": 0.0386,
      "step": 235
    },
    {
      "epoch": 0.3538230884557721,
      "grad_norm": 0.046481177796938444,
      "learning_rate": 0.00019645574184577982,
      "loss": 0.0383,
      "step": 236
    },
    {
      "epoch": 0.3553223388305847,
      "grad_norm": 0.08165767843883412,
      "learning_rate": 0.0001963863295733281,
      "loss": 0.042,
      "step": 237
    },
    {
      "epoch": 0.3568215892053973,
      "grad_norm": 0.038903250983456907,
      "learning_rate": 0.00019631625667976583,
      "loss": 0.0341,
      "step": 238
    },
    {
      "epoch": 0.3583208395802099,
      "grad_norm": 0.04053657985889596,
      "learning_rate": 0.00019624552364536473,
      "loss": 0.0348,
      "step": 239
    },
    {
      "epoch": 0.3598200899550225,
      "grad_norm": 0.04790719363132042,
      "learning_rate": 0.00019617413095492114,
      "loss": 0.0386,
      "step": 240
    },
    {
      "epoch": 0.3613193403298351,
      "grad_norm": 0.053683248937234807,
      "learning_rate": 0.00019610207909775255,
      "loss": 0.0714,
      "step": 241
    },
    {
      "epoch": 0.36281859070464767,
      "grad_norm": 0.038744106761683335,
      "learning_rate": 0.0001960293685676943,
      "loss": 0.0456,
      "step": 242
    },
    {
      "epoch": 0.36431784107946025,
      "grad_norm": 0.05154181261803494,
      "learning_rate": 0.00019595599986309628,
      "loss": 0.0581,
      "step": 243
    },
    {
      "epoch": 0.3658170914542729,
      "grad_norm": 0.04824861446452559,
      "learning_rate": 0.0001958819734868193,
      "loss": 0.0706,
      "step": 244
    },
    {
      "epoch": 0.36731634182908546,
      "grad_norm": 0.03758555173738341,
      "learning_rate": 0.00019580728994623195,
      "loss": 0.0305,
      "step": 245
    },
    {
      "epoch": 0.36881559220389803,
      "grad_norm": 0.03835047499522843,
      "learning_rate": 0.00019573194975320673,
      "loss": 0.032,
      "step": 246
    },
    {
      "epoch": 0.37031484257871067,
      "grad_norm": 0.03475293565575896,
      "learning_rate": 0.000195655953424117,
      "loss": 0.0376,
      "step": 247
    },
    {
      "epoch": 0.37181409295352325,
      "grad_norm": 0.04896052744776511,
      "learning_rate": 0.00019557930147983302,
      "loss": 0.0398,
      "step": 248
    },
    {
      "epoch": 0.3733133433283358,
      "grad_norm": 0.04797369299914517,
      "learning_rate": 0.0001955019944457187,
      "loss": 0.0454,
      "step": 249
    },
    {
      "epoch": 0.3748125937031484,
      "grad_norm": 0.07781910571114095,
      "learning_rate": 0.0001954240328516277,
      "loss": 0.0698,
      "step": 250
    },
    {
      "epoch": 0.37631184407796103,
      "grad_norm": 0.04338750887929876,
      "learning_rate": 0.0001953454172319001,
      "loss": 0.0456,
      "step": 251
    },
    {
      "epoch": 0.3778110944527736,
      "grad_norm": 0.04454900811666872,
      "learning_rate": 0.00019526614812535864,
      "loss": 0.0282,
      "step": 252
    },
    {
      "epoch": 0.3793103448275862,
      "grad_norm": 0.05266395324382892,
      "learning_rate": 0.0001951862260753048,
      "loss": 0.0672,
      "step": 253
    },
    {
      "epoch": 0.3808095952023988,
      "grad_norm": 0.057494318285817976,
      "learning_rate": 0.00019510565162951537,
      "loss": 0.0731,
      "step": 254
    },
    {
      "epoch": 0.3823088455772114,
      "grad_norm": 0.04735808897837354,
      "learning_rate": 0.00019502442534023858,
      "loss": 0.0456,
      "step": 255
    },
    {
      "epoch": 0.383808095952024,
      "grad_norm": 0.1006133724153584,
      "learning_rate": 0.0001949425477641904,
      "loss": 0.0872,
      "step": 256
    },
    {
      "epoch": 0.3853073463268366,
      "grad_norm": 0.02920756237692682,
      "learning_rate": 0.00019486001946255046,
      "loss": 0.0309,
      "step": 257
    },
    {
      "epoch": 0.3868065967016492,
      "grad_norm": 0.04990145786446464,
      "learning_rate": 0.0001947768410009586,
      "loss": 0.033,
      "step": 258
    },
    {
      "epoch": 0.38830584707646176,
      "grad_norm": 0.050701365717084494,
      "learning_rate": 0.0001946930129495106,
      "loss": 0.0367,
      "step": 259
    },
    {
      "epoch": 0.38980509745127434,
      "grad_norm": 0.03698562204942488,
      "learning_rate": 0.00019460853588275454,
      "loss": 0.034,
      "step": 260
    },
    {
      "epoch": 0.391304347826087,
      "grad_norm": 0.07391856974626904,
      "learning_rate": 0.00019452341037968682,
      "loss": 0.0489,
      "step": 261
    },
    {
      "epoch": 0.39280359820089955,
      "grad_norm": 0.05717733621263349,
      "learning_rate": 0.00019443763702374812,
      "loss": 0.0448,
      "step": 262
    },
    {
      "epoch": 0.39430284857571213,
      "grad_norm": 0.044077421896152096,
      "learning_rate": 0.00019435121640281938,
      "loss": 0.0289,
      "step": 263
    },
    {
      "epoch": 0.39580209895052476,
      "grad_norm": 0.07710766944383475,
      "learning_rate": 0.00019426414910921787,
      "loss": 0.062,
      "step": 264
    },
    {
      "epoch": 0.39730134932533734,
      "grad_norm": 0.06386202461201915,
      "learning_rate": 0.00019417643573969303,
      "loss": 0.0545,
      "step": 265
    },
    {
      "epoch": 0.3988005997001499,
      "grad_norm": 0.035388492021471624,
      "learning_rate": 0.00019408807689542257,
      "loss": 0.0338,
      "step": 266
    },
    {
      "epoch": 0.4002998500749625,
      "grad_norm": 0.10927492127823649,
      "learning_rate": 0.00019399907318200802,
      "loss": 0.0893,
      "step": 267
    },
    {
      "epoch": 0.4017991004497751,
      "grad_norm": 0.06012238034598342,
      "learning_rate": 0.0001939094252094709,
      "loss": 0.0527,
      "step": 268
    },
    {
      "epoch": 0.4032983508245877,
      "grad_norm": 0.03391978832858692,
      "learning_rate": 0.00019381913359224842,
      "loss": 0.0374,
      "step": 269
    },
    {
      "epoch": 0.4047976011994003,
      "grad_norm": 0.059440210121053584,
      "learning_rate": 0.00019372819894918915,
      "loss": 0.0709,
      "step": 270
    },
    {
      "epoch": 0.4062968515742129,
      "grad_norm": 0.05188231618768116,
      "learning_rate": 0.000193636621903549,
      "loss": 0.0683,
      "step": 271
    },
    {
      "epoch": 0.4077961019490255,
      "grad_norm": 0.04719133608642286,
      "learning_rate": 0.00019354440308298675,
      "loss": 0.0477,
      "step": 272
    },
    {
      "epoch": 0.40929535232383807,
      "grad_norm": 0.07053359056135224,
      "learning_rate": 0.00019345154311955985,
      "loss": 0.0917,
      "step": 273
    },
    {
      "epoch": 0.4107946026986507,
      "grad_norm": 0.03312060738445713,
      "learning_rate": 0.00019335804264972018,
      "loss": 0.0342,
      "step": 274
    },
    {
      "epoch": 0.4122938530734633,
      "grad_norm": 0.05192332591417718,
      "learning_rate": 0.00019326390231430942,
      "loss": 0.0621,
      "step": 275
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 0.06103626606110473,
      "learning_rate": 0.0001931691227585549,
      "loss": 0.0544,
      "step": 276
    },
    {
      "epoch": 0.41529235382308843,
      "grad_norm": 0.035426733024569466,
      "learning_rate": 0.0001930737046320651,
      "loss": 0.0386,
      "step": 277
    },
    {
      "epoch": 0.41679160419790107,
      "grad_norm": 0.03649997902640354,
      "learning_rate": 0.00019297764858882514,
      "loss": 0.0299,
      "step": 278
    },
    {
      "epoch": 0.41829085457271364,
      "grad_norm": 0.044934938886158564,
      "learning_rate": 0.00019288095528719243,
      "loss": 0.043,
      "step": 279
    },
    {
      "epoch": 0.4197901049475262,
      "grad_norm": 0.056516397554641565,
      "learning_rate": 0.000192783625389892,
      "loss": 0.0491,
      "step": 280
    },
    {
      "epoch": 0.42128935532233885,
      "grad_norm": 0.03896533163368287,
      "learning_rate": 0.00019268565956401208,
      "loss": 0.0358,
      "step": 281
    },
    {
      "epoch": 0.42278860569715143,
      "grad_norm": 0.04117282780665747,
      "learning_rate": 0.0001925870584809995,
      "loss": 0.046,
      "step": 282
    },
    {
      "epoch": 0.424287856071964,
      "grad_norm": 0.3604260889224257,
      "learning_rate": 0.00019248782281665498,
      "loss": 0.1119,
      "step": 283
    },
    {
      "epoch": 0.4257871064467766,
      "grad_norm": 0.04720518482542838,
      "learning_rate": 0.0001923879532511287,
      "loss": 0.0427,
      "step": 284
    },
    {
      "epoch": 0.4272863568215892,
      "grad_norm": 0.04236046102866595,
      "learning_rate": 0.00019228745046891542,
      "loss": 0.026,
      "step": 285
    },
    {
      "epoch": 0.4287856071964018,
      "grad_norm": 0.06797148449245759,
      "learning_rate": 0.00019218631515885006,
      "loss": 0.0449,
      "step": 286
    },
    {
      "epoch": 0.4302848575712144,
      "grad_norm": 0.07816741282791993,
      "learning_rate": 0.00019208454801410266,
      "loss": 0.0696,
      "step": 287
    },
    {
      "epoch": 0.431784107946027,
      "grad_norm": 0.05675040921166316,
      "learning_rate": 0.00019198214973217378,
      "loss": 0.0434,
      "step": 288
    },
    {
      "epoch": 0.4332833583208396,
      "grad_norm": 0.06237548709245271,
      "learning_rate": 0.00019187912101488984,
      "loss": 0.0696,
      "step": 289
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 0.07002008527028293,
      "learning_rate": 0.00019177546256839812,
      "loss": 0.1012,
      "step": 290
    },
    {
      "epoch": 0.4362818590704648,
      "grad_norm": 0.04893068129720385,
      "learning_rate": 0.000191671175103162,
      "loss": 0.0409,
      "step": 291
    },
    {
      "epoch": 0.43778110944527737,
      "grad_norm": 0.04601357359317086,
      "learning_rate": 0.00019156625933395614,
      "loss": 0.0519,
      "step": 292
    },
    {
      "epoch": 0.43928035982008995,
      "grad_norm": 0.05337895694784327,
      "learning_rate": 0.00019146071597986138,
      "loss": 0.0372,
      "step": 293
    },
    {
      "epoch": 0.4407796101949025,
      "grad_norm": 0.03995143875131533,
      "learning_rate": 0.0001913545457642601,
      "loss": 0.0368,
      "step": 294
    },
    {
      "epoch": 0.44227886056971516,
      "grad_norm": 0.09850621381875037,
      "learning_rate": 0.00019124774941483107,
      "loss": 0.0761,
      "step": 295
    },
    {
      "epoch": 0.44377811094452774,
      "grad_norm": 0.038785537167516496,
      "learning_rate": 0.00019114032766354453,
      "loss": 0.041,
      "step": 296
    },
    {
      "epoch": 0.4452773613193403,
      "grad_norm": 0.066120691647694,
      "learning_rate": 0.00019103228124665712,
      "loss": 0.0747,
      "step": 297
    },
    {
      "epoch": 0.44677661169415295,
      "grad_norm": 0.11147859567607986,
      "learning_rate": 0.00019092361090470688,
      "loss": 0.0508,
      "step": 298
    },
    {
      "epoch": 0.4482758620689655,
      "grad_norm": 0.09831159493729676,
      "learning_rate": 0.00019081431738250814,
      "loss": 0.0562,
      "step": 299
    },
    {
      "epoch": 0.4497751124437781,
      "grad_norm": 0.07264006526009004,
      "learning_rate": 0.0001907044014291465,
      "loss": 0.0557,
      "step": 300
    },
    {
      "epoch": 0.4512743628185907,
      "grad_norm": 0.07996560398829375,
      "learning_rate": 0.0001905938637979736,
      "loss": 0.0482,
      "step": 301
    },
    {
      "epoch": 0.4527736131934033,
      "grad_norm": 0.0686136548292036,
      "learning_rate": 0.00019048270524660196,
      "loss": 0.0503,
      "step": 302
    },
    {
      "epoch": 0.4542728635682159,
      "grad_norm": 0.045762027803397255,
      "learning_rate": 0.00019037092653689987,
      "loss": 0.0385,
      "step": 303
    },
    {
      "epoch": 0.45577211394302847,
      "grad_norm": 0.06709124946128789,
      "learning_rate": 0.00019025852843498607,
      "loss": 0.0678,
      "step": 304
    },
    {
      "epoch": 0.4572713643178411,
      "grad_norm": 0.05610882393237923,
      "learning_rate": 0.00019014551171122457,
      "loss": 0.0482,
      "step": 305
    },
    {
      "epoch": 0.4587706146926537,
      "grad_norm": 0.06482964052411654,
      "learning_rate": 0.00019003187714021938,
      "loss": 0.0531,
      "step": 306
    },
    {
      "epoch": 0.46026986506746626,
      "grad_norm": 0.07856511913650002,
      "learning_rate": 0.00018991762550080906,
      "loss": 0.0531,
      "step": 307
    },
    {
      "epoch": 0.4617691154422789,
      "grad_norm": 0.08727827018786473,
      "learning_rate": 0.00018980275757606157,
      "loss": 0.0696,
      "step": 308
    },
    {
      "epoch": 0.46326836581709147,
      "grad_norm": 0.054829790468602986,
      "learning_rate": 0.00018968727415326884,
      "loss": 0.0522,
      "step": 309
    },
    {
      "epoch": 0.46476761619190404,
      "grad_norm": 0.0533722415191346,
      "learning_rate": 0.0001895711760239413,
      "loss": 0.0455,
      "step": 310
    },
    {
      "epoch": 0.4662668665667166,
      "grad_norm": 0.0404960196283668,
      "learning_rate": 0.0001894544639838025,
      "loss": 0.0432,
      "step": 311
    },
    {
      "epoch": 0.46776611694152925,
      "grad_norm": 0.044028946635248145,
      "learning_rate": 0.00018933713883278376,
      "loss": 0.0425,
      "step": 312
    },
    {
      "epoch": 0.46926536731634183,
      "grad_norm": 0.06293150170038057,
      "learning_rate": 0.00018921920137501849,
      "loss": 0.0508,
      "step": 313
    },
    {
      "epoch": 0.4707646176911544,
      "grad_norm": 0.06810220996600896,
      "learning_rate": 0.0001891006524188368,
      "loss": 0.0581,
      "step": 314
    },
    {
      "epoch": 0.47226386806596704,
      "grad_norm": 0.06959342447747006,
      "learning_rate": 0.00018898149277675997,
      "loss": 0.0725,
      "step": 315
    },
    {
      "epoch": 0.4737631184407796,
      "grad_norm": 0.061826777912191176,
      "learning_rate": 0.0001888617232654949,
      "loss": 0.0598,
      "step": 316
    },
    {
      "epoch": 0.4752623688155922,
      "grad_norm": 0.066670766072721,
      "learning_rate": 0.00018874134470592835,
      "loss": 0.0387,
      "step": 317
    },
    {
      "epoch": 0.4767616191904048,
      "grad_norm": 0.05284481473477404,
      "learning_rate": 0.00018862035792312147,
      "loss": 0.0353,
      "step": 318
    },
    {
      "epoch": 0.4782608695652174,
      "grad_norm": 0.03546158471045893,
      "learning_rate": 0.0001884987637463042,
      "loss": 0.0353,
      "step": 319
    },
    {
      "epoch": 0.47976011994003,
      "grad_norm": 0.03695047012775239,
      "learning_rate": 0.00018837656300886937,
      "loss": 0.033,
      "step": 320
    },
    {
      "epoch": 0.48125937031484256,
      "grad_norm": 0.09946531845424002,
      "learning_rate": 0.00018825375654836713,
      "loss": 0.068,
      "step": 321
    },
    {
      "epoch": 0.4827586206896552,
      "grad_norm": 0.0525789693495926,
      "learning_rate": 0.0001881303452064992,
      "loss": 0.0603,
      "step": 322
    },
    {
      "epoch": 0.48425787106446777,
      "grad_norm": 0.061737118720852,
      "learning_rate": 0.00018800632982911322,
      "loss": 0.0606,
      "step": 323
    },
    {
      "epoch": 0.48575712143928035,
      "grad_norm": 0.036553688772480226,
      "learning_rate": 0.00018788171126619653,
      "loss": 0.0299,
      "step": 324
    },
    {
      "epoch": 0.487256371814093,
      "grad_norm": 0.04923145685241048,
      "learning_rate": 0.00018775649037187093,
      "loss": 0.0475,
      "step": 325
    },
    {
      "epoch": 0.48875562218890556,
      "grad_norm": 0.06336340360281395,
      "learning_rate": 0.00018763066800438636,
      "loss": 0.0689,
      "step": 326
    },
    {
      "epoch": 0.49025487256371814,
      "grad_norm": 0.044393763008170536,
      "learning_rate": 0.00018750424502611527,
      "loss": 0.0413,
      "step": 327
    },
    {
      "epoch": 0.4917541229385307,
      "grad_norm": 0.04138603160844389,
      "learning_rate": 0.00018737722230354655,
      "loss": 0.0445,
      "step": 328
    },
    {
      "epoch": 0.49325337331334335,
      "grad_norm": 0.08068404550662704,
      "learning_rate": 0.00018724960070727972,
      "loss": 0.0507,
      "step": 329
    },
    {
      "epoch": 0.4947526236881559,
      "grad_norm": 0.05448885894958413,
      "learning_rate": 0.00018712138111201895,
      "loss": 0.0581,
      "step": 330
    },
    {
      "epoch": 0.4962518740629685,
      "grad_norm": 0.0541786649288355,
      "learning_rate": 0.00018699256439656697,
      "loss": 0.05,
      "step": 331
    },
    {
      "epoch": 0.49775112443778113,
      "grad_norm": 0.09333363889478805,
      "learning_rate": 0.00018686315144381913,
      "loss": 0.0634,
      "step": 332
    },
    {
      "epoch": 0.4992503748125937,
      "grad_norm": 0.06046721622165493,
      "learning_rate": 0.00018673314314075732,
      "loss": 0.0393,
      "step": 333
    },
    {
      "epoch": 0.5007496251874063,
      "grad_norm": 0.22984713729553005,
      "learning_rate": 0.00018660254037844388,
      "loss": 0.1195,
      "step": 334
    },
    {
      "epoch": 0.5022488755622189,
      "grad_norm": 0.03840858370215027,
      "learning_rate": 0.0001864713440520155,
      "loss": 0.038,
      "step": 335
    },
    {
      "epoch": 0.5037481259370314,
      "grad_norm": 0.0460739408926456,
      "learning_rate": 0.00018633955506067718,
      "loss": 0.0277,
      "step": 336
    },
    {
      "epoch": 0.5052473763118441,
      "grad_norm": 0.058586326621381794,
      "learning_rate": 0.00018620717430769586,
      "loss": 0.0418,
      "step": 337
    },
    {
      "epoch": 0.5067466266866567,
      "grad_norm": 0.0691170892823961,
      "learning_rate": 0.0001860742027003944,
      "loss": 0.0611,
      "step": 338
    },
    {
      "epoch": 0.5082458770614693,
      "grad_norm": 0.054450391696896736,
      "learning_rate": 0.0001859406411501453,
      "loss": 0.0389,
      "step": 339
    },
    {
      "epoch": 0.5097451274362819,
      "grad_norm": 0.05989403502334338,
      "learning_rate": 0.00018580649057236447,
      "loss": 0.0656,
      "step": 340
    },
    {
      "epoch": 0.5112443778110944,
      "grad_norm": 0.05812418432414411,
      "learning_rate": 0.00018567175188650498,
      "loss": 0.0516,
      "step": 341
    },
    {
      "epoch": 0.512743628185907,
      "grad_norm": 0.1024472803798587,
      "learning_rate": 0.00018553642601605068,
      "loss": 0.0489,
      "step": 342
    },
    {
      "epoch": 0.5142428785607196,
      "grad_norm": 0.06418199886991582,
      "learning_rate": 0.00018540051388850993,
      "loss": 0.0641,
      "step": 343
    },
    {
      "epoch": 0.5157421289355323,
      "grad_norm": 0.05354265643676874,
      "learning_rate": 0.00018526401643540922,
      "loss": 0.0342,
      "step": 344
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 0.060234343510686926,
      "learning_rate": 0.00018512693459228684,
      "loss": 0.0441,
      "step": 345
    },
    {
      "epoch": 0.5187406296851574,
      "grad_norm": 0.05502477922945836,
      "learning_rate": 0.00018498926929868642,
      "loss": 0.044,
      "step": 346
    },
    {
      "epoch": 0.52023988005997,
      "grad_norm": 0.05316566519953199,
      "learning_rate": 0.00018485102149815038,
      "loss": 0.0407,
      "step": 347
    },
    {
      "epoch": 0.5217391304347826,
      "grad_norm": 0.08251700451700679,
      "learning_rate": 0.00018471219213821375,
      "loss": 0.0475,
      "step": 348
    },
    {
      "epoch": 0.5232383808095952,
      "grad_norm": 0.044332263163009075,
      "learning_rate": 0.00018457278217039736,
      "loss": 0.0339,
      "step": 349
    },
    {
      "epoch": 0.5247376311844077,
      "grad_norm": 0.06598873060964301,
      "learning_rate": 0.00018443279255020152,
      "loss": 0.0623,
      "step": 350
    },
    {
      "epoch": 0.5262368815592204,
      "grad_norm": 0.09432658184269146,
      "learning_rate": 0.00018429222423709947,
      "loss": 0.0681,
      "step": 351
    },
    {
      "epoch": 0.527736131934033,
      "grad_norm": 0.048137123345461545,
      "learning_rate": 0.00018415107819453062,
      "loss": 0.0379,
      "step": 352
    },
    {
      "epoch": 0.5292353823088456,
      "grad_norm": 0.048726928699549,
      "learning_rate": 0.0001840093553898942,
      "loss": 0.0517,
      "step": 353
    },
    {
      "epoch": 0.5307346326836582,
      "grad_norm": 0.03886288744382217,
      "learning_rate": 0.00018386705679454242,
      "loss": 0.0309,
      "step": 354
    },
    {
      "epoch": 0.5322338830584707,
      "grad_norm": 0.04487311032788653,
      "learning_rate": 0.0001837241833837739,
      "loss": 0.0294,
      "step": 355
    },
    {
      "epoch": 0.5337331334332833,
      "grad_norm": 0.03798327068043393,
      "learning_rate": 0.00018358073613682706,
      "loss": 0.031,
      "step": 356
    },
    {
      "epoch": 0.5352323838080959,
      "grad_norm": 0.050882637106700504,
      "learning_rate": 0.00018343671603687317,
      "loss": 0.039,
      "step": 357
    },
    {
      "epoch": 0.5367316341829086,
      "grad_norm": 0.044214998902001985,
      "learning_rate": 0.00018329212407100994,
      "loss": 0.0333,
      "step": 358
    },
    {
      "epoch": 0.5382308845577212,
      "grad_norm": 0.06411346885005922,
      "learning_rate": 0.00018314696123025454,
      "loss": 0.066,
      "step": 359
    },
    {
      "epoch": 0.5397301349325337,
      "grad_norm": 0.042800963513781314,
      "learning_rate": 0.00018300122850953675,
      "loss": 0.0303,
      "step": 360
    },
    {
      "epoch": 0.5412293853073463,
      "grad_norm": 0.051572469675010446,
      "learning_rate": 0.00018285492690769237,
      "loss": 0.0459,
      "step": 361
    },
    {
      "epoch": 0.5427286356821589,
      "grad_norm": 0.06594725487157567,
      "learning_rate": 0.00018270805742745617,
      "loss": 0.0554,
      "step": 362
    },
    {
      "epoch": 0.5442278860569715,
      "grad_norm": 0.08966668405728594,
      "learning_rate": 0.00018256062107545518,
      "loss": 0.0504,
      "step": 363
    },
    {
      "epoch": 0.545727136431784,
      "grad_norm": 0.05705712395021745,
      "learning_rate": 0.00018241261886220154,
      "loss": 0.0542,
      "step": 364
    },
    {
      "epoch": 0.5472263868065967,
      "grad_norm": 0.046841858897893975,
      "learning_rate": 0.000182264051802086,
      "loss": 0.046,
      "step": 365
    },
    {
      "epoch": 0.5487256371814093,
|
"grad_norm": 0.05043321212035937, |
|
"learning_rate": 0.00018211492091337042, |
|
"loss": 0.0515, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.5502248875562219, |
|
"grad_norm": 0.05643206918749337, |
|
"learning_rate": 0.00018196522721818128, |
|
"loss": 0.0398, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.5517241379310345, |
|
"grad_norm": 0.10718050662244094, |
|
"learning_rate": 0.00018181497174250236, |
|
"loss": 0.0234, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.553223388305847, |
|
"grad_norm": 0.07740102244845123, |
|
"learning_rate": 0.00018166415551616792, |
|
"loss": 0.0523, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.5547226386806596, |
|
"grad_norm": 0.06095682806386288, |
|
"learning_rate": 0.00018151277957285543, |
|
"loss": 0.0434, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.5562218890554723, |
|
"grad_norm": 0.06932915829661977, |
|
"learning_rate": 0.00018136084495007872, |
|
"loss": 0.0531, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.5577211394302849, |
|
"grad_norm": 0.04400477443557516, |
|
"learning_rate": 0.00018120835268918063, |
|
"loss": 0.0387, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.5592203898050975, |
|
"grad_norm": 0.05568457669143035, |
|
"learning_rate": 0.00018105530383532607, |
|
"loss": 0.0359, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.56071964017991, |
|
"grad_norm": 0.09375047939362961, |
|
"learning_rate": 0.00018090169943749476, |
|
"loss": 0.0613, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.5622188905547226, |
|
"grad_norm": 0.09342104698058959, |
|
"learning_rate": 0.000180747540548474, |
|
"loss": 0.0508, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.5637181409295352, |
|
"grad_norm": 0.06980360470280776, |
|
"learning_rate": 0.00018059282822485158, |
|
"loss": 0.0588, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.5652173913043478, |
|
"grad_norm": 0.04834116988503355, |
|
"learning_rate": 0.00018043756352700846, |
|
"loss": 0.0407, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.5667166416791605, |
|
"grad_norm": 0.0559433504674298, |
|
"learning_rate": 0.00018028174751911146, |
|
"loss": 0.0487, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.568215892053973, |
|
"grad_norm": 0.04299769695781603, |
|
"learning_rate": 0.00018012538126910608, |
|
"loss": 0.0289, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.5697151424287856, |
|
"grad_norm": 0.061484741377732285, |
|
"learning_rate": 0.00017996846584870908, |
|
"loss": 0.0545, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.5712143928035982, |
|
"grad_norm": 0.04596560988589707, |
|
"learning_rate": 0.00017981100233340115, |
|
"loss": 0.0381, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.5727136431784108, |
|
"grad_norm": 0.03818801182308544, |
|
"learning_rate": 0.00017965299180241963, |
|
"loss": 0.0348, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.5742128935532234, |
|
"grad_norm": 0.050232900629482914, |
|
"learning_rate": 0.000179494435338751, |
|
"loss": 0.0265, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.5757121439280359, |
|
"grad_norm": 0.0740772370322547, |
|
"learning_rate": 0.00017933533402912354, |
|
"loss": 0.0629, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.5772113943028486, |
|
"grad_norm": 0.03602072499212665, |
|
"learning_rate": 0.00017917568896399973, |
|
"loss": 0.0303, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.5787106446776612, |
|
"grad_norm": 0.04732712114588865, |
|
"learning_rate": 0.00017901550123756906, |
|
"loss": 0.0533, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.5802098950524738, |
|
"grad_norm": 0.0438295195621145, |
|
"learning_rate": 0.0001788547719477402, |
|
"loss": 0.033, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.5817091454272864, |
|
"grad_norm": 0.045622862481622464, |
|
"learning_rate": 0.00017869350219613375, |
|
"loss": 0.0436, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.5832083958020989, |
|
"grad_norm": 0.04964994949451908, |
|
"learning_rate": 0.00017853169308807448, |
|
"loss": 0.0375, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.5847076461769115, |
|
"grad_norm": 0.05078556363206241, |
|
"learning_rate": 0.000178369345732584, |
|
"loss": 0.0345, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.5862068965517241, |
|
"grad_norm": 0.05074730082524994, |
|
"learning_rate": 0.00017820646124237278, |
|
"loss": 0.0364, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.5877061469265368, |
|
"grad_norm": 0.04603113674846868, |
|
"learning_rate": 0.000178043040733833, |
|
"loss": 0.0302, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.5892053973013494, |
|
"grad_norm": 0.058291821623076764, |
|
"learning_rate": 0.00017787908532703042, |
|
"loss": 0.0538, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.5907046476761619, |
|
"grad_norm": 0.06209286221938147, |
|
"learning_rate": 0.0001777145961456971, |
|
"loss": 0.0437, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.5922038980509745, |
|
"grad_norm": 0.035508544978000475, |
|
"learning_rate": 0.00017754957431722346, |
|
"loss": 0.0361, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.5937031484257871, |
|
"grad_norm": 0.045021307147078936, |
|
"learning_rate": 0.00017738402097265064, |
|
"loss": 0.0314, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.5952023988005997, |
|
"grad_norm": 0.045117529238231965, |
|
"learning_rate": 0.00017721793724666268, |
|
"loss": 0.0286, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.5967016491754122, |
|
"grad_norm": 0.04284437809907477, |
|
"learning_rate": 0.00017705132427757895, |
|
"loss": 0.0316, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.5982008995502249, |
|
"grad_norm": 0.05363788525676639, |
|
"learning_rate": 0.00017688418320734598, |
|
"loss": 0.0575, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.5997001499250375, |
|
"grad_norm": 0.039147054068539446, |
|
"learning_rate": 0.00017671651518153, |
|
"loss": 0.0298, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.6011994002998501, |
|
"grad_norm": 0.047103082273761164, |
|
"learning_rate": 0.00017654832134930882, |
|
"loss": 0.0332, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.6026986506746627, |
|
"grad_norm": 0.03434268067696955, |
|
"learning_rate": 0.00017637960286346425, |
|
"loss": 0.0224, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.6041979010494752, |
|
"grad_norm": 0.05227048698626022, |
|
"learning_rate": 0.00017621036088037378, |
|
"loss": 0.0518, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.6056971514242878, |
|
"grad_norm": 0.18818259332861206, |
|
"learning_rate": 0.0001760405965600031, |
|
"loss": 0.0758, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.6071964017991005, |
|
"grad_norm": 0.05885735282398511, |
|
"learning_rate": 0.00017587031106589782, |
|
"loss": 0.0361, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.6086956521739131, |
|
"grad_norm": 0.08962878091622765, |
|
"learning_rate": 0.00017569950556517566, |
|
"loss": 0.0523, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.6101949025487257, |
|
"grad_norm": 0.07406938968015182, |
|
"learning_rate": 0.00017552818122851838, |
|
"loss": 0.0372, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.6116941529235382, |
|
"grad_norm": 0.0589541107505507, |
|
"learning_rate": 0.0001753563392301638, |
|
"loss": 0.0518, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.6131934032983508, |
|
"grad_norm": 0.0493031824176551, |
|
"learning_rate": 0.00017518398074789775, |
|
"loss": 0.0404, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.6146926536731634, |
|
"grad_norm": 0.060948463938890016, |
|
"learning_rate": 0.00017501110696304596, |
|
"loss": 0.0434, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.616191904047976, |
|
"grad_norm": 0.05214572496276531, |
|
"learning_rate": 0.00017483771906046602, |
|
"loss": 0.0384, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.6176911544227887, |
|
"grad_norm": 0.05632303506730485, |
|
"learning_rate": 0.00017466381822853915, |
|
"loss": 0.032, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.6191904047976012, |
|
"grad_norm": 0.06363985932901699, |
|
"learning_rate": 0.00017448940565916222, |
|
"loss": 0.0532, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.6206896551724138, |
|
"grad_norm": 0.045655772233298454, |
|
"learning_rate": 0.00017431448254773944, |
|
"loss": 0.0335, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.6221889055472264, |
|
"grad_norm": 0.05496397917447876, |
|
"learning_rate": 0.00017413905009317423, |
|
"loss": 0.0432, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.623688155922039, |
|
"grad_norm": 0.0542650773216564, |
|
"learning_rate": 0.000173963109497861, |
|
"loss": 0.0299, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.6251874062968515, |
|
"grad_norm": 0.053416335915512446, |
|
"learning_rate": 0.00017378666196767685, |
|
"loss": 0.0384, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.6266866566716641, |
|
"grad_norm": 0.053317686220209964, |
|
"learning_rate": 0.00017360970871197346, |
|
"loss": 0.0355, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.6281859070464768, |
|
"grad_norm": 0.058288847449129474, |
|
"learning_rate": 0.00017343225094356855, |
|
"loss": 0.052, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.6296851574212894, |
|
"grad_norm": 0.042009952684143474, |
|
"learning_rate": 0.0001732542898787379, |
|
"loss": 0.0452, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.631184407796102, |
|
"grad_norm": 0.04265378963968585, |
|
"learning_rate": 0.00017307582673720663, |
|
"loss": 0.0314, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.6326836581709145, |
|
"grad_norm": 0.0473560166231087, |
|
"learning_rate": 0.00017289686274214118, |
|
"loss": 0.0481, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.6341829085457271, |
|
"grad_norm": 0.04316822944816478, |
|
"learning_rate": 0.00017271739912014068, |
|
"loss": 0.035, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.6356821589205397, |
|
"grad_norm": 0.04821805663840275, |
|
"learning_rate": 0.00017253743710122875, |
|
"loss": 0.0353, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.6371814092953523, |
|
"grad_norm": 0.05766150770706173, |
|
"learning_rate": 0.00017235697791884494, |
|
"loss": 0.0518, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.638680659670165, |
|
"grad_norm": 0.09048982025586255, |
|
"learning_rate": 0.00017217602280983623, |
|
"loss": 0.0477, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.6401799100449775, |
|
"grad_norm": 0.04016296087640653, |
|
"learning_rate": 0.00017199457301444868, |
|
"loss": 0.0284, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.6416791604197901, |
|
"grad_norm": 0.07151231389242098, |
|
"learning_rate": 0.00017181262977631888, |
|
"loss": 0.0534, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.6431784107946027, |
|
"grad_norm": 0.0521164376523254, |
|
"learning_rate": 0.00017163019434246547, |
|
"loss": 0.0413, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.6446776611694153, |
|
"grad_norm": 0.06650764446753712, |
|
"learning_rate": 0.00017144726796328034, |
|
"loss": 0.0532, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.6461769115442278, |
|
"grad_norm": 0.05631153402061295, |
|
"learning_rate": 0.00017126385189252053, |
|
"loss": 0.04, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.6476761619190404, |
|
"grad_norm": 0.07908703283560595, |
|
"learning_rate": 0.00017107994738729926, |
|
"loss": 0.0546, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.6491754122938531, |
|
"grad_norm": 0.053983476730184235, |
|
"learning_rate": 0.00017089555570807736, |
|
"loss": 0.0428, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.6506746626686657, |
|
"grad_norm": 0.04223485749838613, |
|
"learning_rate": 0.00017071067811865476, |
|
"loss": 0.039, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.6521739130434783, |
|
"grad_norm": 0.05289448745150468, |
|
"learning_rate": 0.00017052531588616178, |
|
"loss": 0.0369, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.6536731634182908, |
|
"grad_norm": 0.04254188142407335, |
|
"learning_rate": 0.00017033947028105039, |
|
"loss": 0.0482, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.6551724137931034, |
|
"grad_norm": 0.03642799958156187, |
|
"learning_rate": 0.0001701531425770856, |
|
"loss": 0.0227, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.656671664167916, |
|
"grad_norm": 0.044290202162954866, |
|
"learning_rate": 0.00016996633405133655, |
|
"loss": 0.0317, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.6581709145427287, |
|
"grad_norm": 0.03328713625145707, |
|
"learning_rate": 0.00016977904598416803, |
|
"loss": 0.0193, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.6596701649175413, |
|
"grad_norm": 0.06825872867733464, |
|
"learning_rate": 0.00016959127965923142, |
|
"loss": 0.0524, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.6611694152923538, |
|
"grad_norm": 0.05737656538911755, |
|
"learning_rate": 0.00016940303636345618, |
|
"loss": 0.0384, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.6626686656671664, |
|
"grad_norm": 0.05217169033909698, |
|
"learning_rate": 0.0001692143173870407, |
|
"loss": 0.0398, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.664167916041979, |
|
"grad_norm": 0.08029746938521902, |
|
"learning_rate": 0.00016902512402344373, |
|
"loss": 0.0996, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.6656671664167916, |
|
"grad_norm": 0.05023109850591579, |
|
"learning_rate": 0.0001688354575693754, |
|
"loss": 0.0444, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.6671664167916042, |
|
"grad_norm": 0.05007534396139674, |
|
"learning_rate": 0.00016864531932478838, |
|
"loss": 0.0495, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.6686656671664168, |
|
"grad_norm": 0.06850886410730117, |
|
"learning_rate": 0.00016845471059286887, |
|
"loss": 0.0491, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.6701649175412294, |
|
"grad_norm": 0.0709049222949615, |
|
"learning_rate": 0.00016826363268002782, |
|
"loss": 0.086, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.671664167916042, |
|
"grad_norm": 0.07625185163942867, |
|
"learning_rate": 0.0001680720868958918, |
|
"loss": 0.0612, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.6731634182908546, |
|
"grad_norm": 0.03903647995204454, |
|
"learning_rate": 0.0001678800745532942, |
|
"loss": 0.0286, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.6746626686656672, |
|
"grad_norm": 0.04875227954727952, |
|
"learning_rate": 0.00016768759696826608, |
|
"loss": 0.0452, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.6761619190404797, |
|
"grad_norm": 0.045509076234092845, |
|
"learning_rate": 0.0001674946554600273, |
|
"loss": 0.0401, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.6776611694152923, |
|
"grad_norm": 0.038954973560474074, |
|
"learning_rate": 0.00016730125135097735, |
|
"loss": 0.0428, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.679160419790105, |
|
"grad_norm": 0.05064178738289288, |
|
"learning_rate": 0.00016710738596668632, |
|
"loss": 0.0481, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.6806596701649176, |
|
"grad_norm": 0.03652262635297044, |
|
"learning_rate": 0.00016691306063588583, |
|
"loss": 0.0341, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.6821589205397302, |
|
"grad_norm": 0.0469218301221786, |
|
"learning_rate": 0.00016671827669045998, |
|
"loss": 0.0388, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.6836581709145427, |
|
"grad_norm": 0.05748273212579017, |
|
"learning_rate": 0.00016652303546543608, |
|
"loss": 0.0424, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.6851574212893553, |
|
"grad_norm": 0.029734098462886188, |
|
"learning_rate": 0.00016632733829897566, |
|
"loss": 0.0221, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.6866566716641679, |
|
"grad_norm": 0.03162041588945904, |
|
"learning_rate": 0.00016613118653236518, |
|
"loss": 0.0284, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.6881559220389805, |
|
"grad_norm": 0.0770559271143547, |
|
"learning_rate": 0.00016593458151000688, |
|
"loss": 0.0647, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.6896551724137931, |
|
"grad_norm": 0.0806809324128278, |
|
"learning_rate": 0.0001657375245794096, |
|
"loss": 0.0507, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.6911544227886057, |
|
"grad_norm": 0.0623039707244334, |
|
"learning_rate": 0.0001655400170911794, |
|
"loss": 0.0443, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.6926536731634183, |
|
"grad_norm": 0.04046812960090323, |
|
"learning_rate": 0.00016534206039901057, |
|
"loss": 0.047, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.6941529235382309, |
|
"grad_norm": 0.09437551047944975, |
|
"learning_rate": 0.00016514365585967603, |
|
"loss": 0.0626, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.6956521739130435, |
|
"grad_norm": 0.07162777105996744, |
|
"learning_rate": 0.00016494480483301836, |
|
"loss": 0.0503, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.697151424287856, |
|
"grad_norm": 0.04109223749911729, |
|
"learning_rate": 0.00016474550868194023, |
|
"loss": 0.0315, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.6986506746626686, |
|
"grad_norm": 0.06077734184621873, |
|
"learning_rate": 0.00016454576877239507, |
|
"loss": 0.0422, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.7001499250374813, |
|
"grad_norm": 0.19884111549839148, |
|
"learning_rate": 0.0001643455864733779, |
|
"loss": 0.0926, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.7016491754122939, |
|
"grad_norm": 0.04642784378882251, |
|
"learning_rate": 0.00016414496315691581, |
|
"loss": 0.0385, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.7031484257871065, |
|
"grad_norm": 0.4393223814451174, |
|
"learning_rate": 0.00016394390019805848, |
|
"loss": 0.0993, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.704647676161919, |
|
"grad_norm": 0.07482643775181062, |
|
"learning_rate": 0.000163742398974869, |
|
"loss": 0.0489, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.7061469265367316, |
|
"grad_norm": 0.04885944436540397, |
|
"learning_rate": 0.0001635404608684141, |
|
"loss": 0.0348, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.7076461769115442, |
|
"grad_norm": 0.049664390973816655, |
|
"learning_rate": 0.000163338087262755, |
|
"loss": 0.0391, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.7091454272863568, |
|
"grad_norm": 0.09885583055337595, |
|
"learning_rate": 0.00016313527954493778, |
|
"loss": 0.0642, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.7106446776611695, |
|
"grad_norm": 0.06584763072102895, |
|
"learning_rate": 0.00016293203910498376, |
|
"loss": 0.0446, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.712143928035982, |
|
"grad_norm": 0.052090571949195315, |
|
"learning_rate": 0.00016272836733588017, |
|
"loss": 0.0449, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.7136431784107946, |
|
"grad_norm": 0.060369511284079136, |
|
"learning_rate": 0.00016252426563357055, |
|
"loss": 0.0498, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.7151424287856072, |
|
"grad_norm": 0.07350090323203447, |
|
"learning_rate": 0.00016231973539694504, |
|
"loss": 0.0594, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.7166416791604198, |
|
"grad_norm": 0.07477778304256645, |
|
"learning_rate": 0.00016211477802783103, |
|
"loss": 0.0339, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.7181409295352323, |
|
"grad_norm": 0.044909007459069075, |
|
"learning_rate": 0.00016190939493098344, |
|
"loss": 0.0379, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.719640179910045, |
|
"grad_norm": 0.062190968309072295, |
|
"learning_rate": 0.00016170358751407487, |
|
"loss": 0.0758, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.7211394302848576, |
|
"grad_norm": 0.05377348813315943, |
|
"learning_rate": 0.00016149735718768644, |
|
"loss": 0.0329, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.7226386806596702, |
|
"grad_norm": 0.053690857438836906, |
|
"learning_rate": 0.00016129070536529766, |
|
"loss": 0.037, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.7241379310344828, |
|
"grad_norm": 0.053806227909165436, |
|
"learning_rate": 0.000161083633463277, |
|
"loss": 0.0429, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.7256371814092953, |
|
"grad_norm": 0.058550946973171134, |
|
"learning_rate": 0.00016087614290087208, |
|
"loss": 0.0573, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.7271364317841079, |
|
"grad_norm": 0.05208602315147625, |
|
"learning_rate": 0.00016066823510019998, |
|
"loss": 0.0322, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.7286356821589205, |
|
"grad_norm": 0.048780008222607175, |
|
"learning_rate": 0.0001604599114862375, |
|
"loss": 0.0391, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.7301349325337332, |
|
"grad_norm": 0.04257645362681531, |
|
"learning_rate": 0.00016025117348681132, |
|
"loss": 0.0326, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.7316341829085458, |
|
"grad_norm": 0.049273729257806875, |
|
"learning_rate": 0.00016004202253258842, |
|
"loss": 0.0446, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.7331334332833583, |
|
"grad_norm": 0.05564516630711474, |
|
"learning_rate": 0.00015983246005706593, |
|
"loss": 0.0567, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.7346326836581709, |
|
"grad_norm": 0.05015891963593426, |
|
"learning_rate": 0.0001596224874965616, |
|
"loss": 0.0365, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.7361319340329835, |
|
"grad_norm": 0.04227849702908071, |
|
"learning_rate": 0.00015941210629020388, |
|
"loss": 0.0278, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.7376311844077961, |
|
"grad_norm": 0.09190608644561755, |
|
"learning_rate": 0.00015920131787992197, |
|
"loss": 0.1024, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.7391304347826086, |
|
"grad_norm": 0.11304924052426712, |
|
"learning_rate": 0.00015899012371043604, |
|
"loss": 0.0675, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.7406296851574213, |
|
"grad_norm": 0.05613518703388969, |
|
"learning_rate": 0.00015877852522924732, |
|
"loss": 0.0407, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.7421289355322339, |
|
"grad_norm": 0.04754947301267445, |
|
"learning_rate": 0.00015856652388662808, |
|
"loss": 0.0305, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.7436281859070465, |
|
"grad_norm": 0.0487917467215864, |
|
"learning_rate": 0.00015835412113561175, |
|
"loss": 0.0372, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.7451274362818591, |
|
"grad_norm": 0.056051719216029486, |
|
"learning_rate": 0.00015814131843198308, |
|
"loss": 0.033, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.7466266866566716, |
|
"grad_norm": 0.08355105349400162, |
|
"learning_rate": 0.0001579281172342679, |
|
"loss": 0.0618, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.7481259370314842, |
|
"grad_norm": 0.09016322211305738, |
|
"learning_rate": 0.0001577145190037234, |
|
"loss": 0.047, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.7496251874062968, |
|
"grad_norm": 0.04982867112956479, |
|
"learning_rate": 0.00015750052520432787, |
|
"loss": 0.0313, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.7511244377811095, |
|
"grad_norm": 0.04381518020575622, |
|
"learning_rate": 0.0001572861373027709, |
|
"loss": 0.0348, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.7526236881559221, |
|
"grad_norm": 0.05899789350925751, |
|
"learning_rate": 0.0001570713567684432, |
|
"loss": 0.0503, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.7541229385307346, |
|
"grad_norm": 0.06316265954005644, |
|
"learning_rate": 0.0001568561850734264, |
|
"loss": 0.0291, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.7556221889055472, |
|
"grad_norm": 0.08632078705291048, |
|
"learning_rate": 0.00015664062369248328, |
|
"loss": 0.0639, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.7571214392803598, |
|
"grad_norm": 0.050182212371613015, |
|
"learning_rate": 0.00015642467410304742, |
|
"loss": 0.0305, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.7586206896551724, |
|
"grad_norm": 0.07158241120198346, |
|
"learning_rate": 0.00015620833778521307, |
|
"loss": 0.0573, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.760119940029985, |
|
"grad_norm": 0.08175078694638557, |
|
"learning_rate": 0.00015599161622172517, |
|
"loss": 0.0462, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.7616191904047976, |
|
"grad_norm": 0.0475536897913194, |
|
"learning_rate": 0.00015577451089796905, |
|
"loss": 0.0501, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.7631184407796102, |
|
"grad_norm": 0.05682015512029848, |
|
"learning_rate": 0.00015555702330196023, |
|
"loss": 0.0313, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.7646176911544228, |
|
"grad_norm": 0.06137429357639015, |
|
"learning_rate": 0.00015533915492433443, |
|
"loss": 0.0519, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.7661169415292354, |
|
"grad_norm": 0.038591640884982854, |
|
"learning_rate": 0.00015512090725833705, |
|
"loss": 0.0414, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.767616191904048, |
|
"grad_norm": 0.055149664940849136, |
|
"learning_rate": 0.0001549022817998132, |
|
"loss": 0.0318, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.7691154422788605, |
|
"grad_norm": 0.04445225405210936, |
|
"learning_rate": 0.00015468328004719726, |
|
"loss": 0.041, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.7706146926536732, |
|
"grad_norm": 0.05691241429851254, |
|
"learning_rate": 0.00015446390350150273, |
|
"loss": 0.0379, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.7721139430284858, |
|
"grad_norm": 0.05761804723554256, |
|
"learning_rate": 0.00015424415366631188, |
|
"loss": 0.0363, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.7736131934032984, |
|
"grad_norm": 0.04929119952169534, |
|
"learning_rate": 0.0001540240320477655, |
|
"loss": 0.0426, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.775112443778111, |
|
"grad_norm": 0.044305931962162155, |
|
"learning_rate": 0.0001538035401545525, |
|
"loss": 0.0422, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.7766116941529235, |
|
"grad_norm": 0.06205510180982029, |
|
"learning_rate": 0.00015358267949789966, |
|
"loss": 0.0603, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.7781109445277361, |
|
"grad_norm": 0.07453649068372155, |
|
"learning_rate": 0.00015336145159156115, |
|
"loss": 0.0543, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.7796101949025487, |
|
"grad_norm": 0.0378467187255617, |
|
"learning_rate": 0.00015313985795180828, |
|
"loss": 0.04, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.7811094452773614, |
|
"grad_norm": 0.057596547352352555, |
|
"learning_rate": 0.00015291790009741907, |
|
"loss": 0.0451, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.782608695652174, |
|
"grad_norm": 0.04081926428494982, |
|
"learning_rate": 0.00015269557954966778, |
|
"loss": 0.0288, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.7841079460269865, |
|
"grad_norm": 0.041887534030808915, |
|
"learning_rate": 0.00015247289783231453, |
|
"loss": 0.035, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.7856071964017991, |
|
"grad_norm": 0.06860023926519257, |
|
"learning_rate": 0.0001522498564715949, |
|
"loss": 0.049, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.7871064467766117, |
|
"grad_norm": 0.04260551619067463, |
|
"learning_rate": 0.0001520264569962094, |
|
"loss": 0.0427, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.7886056971514243, |
|
"grad_norm": 0.03773306965490992, |
|
"learning_rate": 0.00015180270093731303, |
|
"loss": 0.0245, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.7901049475262368, |
|
"grad_norm": 0.03431124298471051, |
|
"learning_rate": 0.00015157858982850475, |
|
"loss": 0.0289, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.7916041979010495, |
|
"grad_norm": 0.07692552283154377, |
|
"learning_rate": 0.00015135412520581702, |
|
"loss": 0.0661, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.7931034482758621, |
|
"grad_norm": 0.0347019843571733, |
|
"learning_rate": 0.0001511293086077052, |
|
"loss": 0.0267, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.7946026986506747, |
|
"grad_norm": 0.05889548183130433, |
|
"learning_rate": 0.00015090414157503714, |
|
"loss": 0.0321, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.7961019490254873, |
|
"grad_norm": 0.07066481807133856, |
|
"learning_rate": 0.0001506786256510824, |
|
"loss": 0.052, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.7976011994002998, |
|
"grad_norm": 0.06186361286136524, |
|
"learning_rate": 0.00015045276238150192, |
|
"loss": 0.0479, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.7991004497751124, |
|
"grad_norm": 0.061605402385295015, |
|
"learning_rate": 0.00015022655331433727, |
|
"loss": 0.0523, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.800599700149925, |
|
"grad_norm": 0.04168389823340015, |
|
"learning_rate": 0.00015000000000000001, |
|
"loss": 0.0293, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.8020989505247377, |
|
"grad_norm": 0.04409075971831587, |
|
"learning_rate": 0.00014977310399126123, |
|
"loss": 0.0294, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.8035982008995503, |
|
"grad_norm": 0.03449538576446893, |
|
"learning_rate": 0.00014954586684324078, |
|
"loss": 0.0273, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.8050974512743628, |
|
"grad_norm": 0.06487922260997829, |
|
"learning_rate": 0.00014931829011339659, |
|
"loss": 0.1013, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.8065967016491754, |
|
"grad_norm": 0.04080309597694205, |
|
"learning_rate": 0.00014909037536151409, |
|
"loss": 0.0271, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.808095952023988, |
|
"grad_norm": 0.039495017673297265, |
|
"learning_rate": 0.00014886212414969553, |
|
"loss": 0.0274, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.8095952023988006, |
|
"grad_norm": 0.08062160395086124, |
|
"learning_rate": 0.00014863353804234905, |
|
"loss": 0.0417, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.8110944527736131, |
|
"grad_norm": 0.05152809022416075, |
|
"learning_rate": 0.00014840461860617834, |
|
"loss": 0.0359, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.8125937031484258, |
|
"grad_norm": 0.04922307205252768, |
|
"learning_rate": 0.00014817536741017152, |
|
"loss": 0.0308, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.8140929535232384, |
|
"grad_norm": 0.07044648065491671, |
|
"learning_rate": 0.00014794578602559067, |
|
"loss": 0.0566, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.815592203898051, |
|
"grad_norm": 0.0465206984979631, |
|
"learning_rate": 0.00014771587602596084, |
|
"loss": 0.0513, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.8170914542728636, |
|
"grad_norm": 0.09746747148479479, |
|
"learning_rate": 0.00014748563898705946, |
|
"loss": 0.0377, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.8185907046476761, |
|
"grad_norm": 0.050647534375834816, |
|
"learning_rate": 0.00014725507648690543, |
|
"loss": 0.048, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.8200899550224887, |
|
"grad_norm": 0.05429153224198516, |
|
"learning_rate": 0.00014702419010574825, |
|
"loss": 0.0523, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.8215892053973014, |
|
"grad_norm": 0.036339453217671985, |
|
"learning_rate": 0.00014679298142605734, |
|
"loss": 0.0233, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.823088455772114, |
|
"grad_norm": 0.08317522768191385, |
|
"learning_rate": 0.00014656145203251114, |
|
"loss": 0.0619, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.8245877061469266, |
|
"grad_norm": 0.04231199661380915, |
|
"learning_rate": 0.00014632960351198618, |
|
"loss": 0.0477, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.8260869565217391, |
|
"grad_norm": 0.1779970750798073, |
|
"learning_rate": 0.00014609743745354624, |
|
"loss": 0.0978, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.8275862068965517, |
|
"grad_norm": 0.06735162872665944, |
|
"learning_rate": 0.00014586495544843152, |
|
"loss": 0.0435, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.8290854572713643, |
|
"grad_norm": 0.06794928340973848, |
|
"learning_rate": 0.0001456321590900476, |
|
"loss": 0.0548, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.8305847076461769, |
|
"grad_norm": 0.040691650099011545, |
|
"learning_rate": 0.00014539904997395468, |
|
"loss": 0.0307, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.8320839580209896, |
|
"grad_norm": 0.06310347101508698, |
|
"learning_rate": 0.00014516562969785657, |
|
"loss": 0.0518, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.8335832083958021, |
|
"grad_norm": 0.06600280603320972, |
|
"learning_rate": 0.00014493189986158965, |
|
"loss": 0.0283, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.8350824587706147, |
|
"grad_norm": 0.048741740097207054, |
|
"learning_rate": 0.00014469786206711214, |
|
"loss": 0.0369, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.8365817091454273, |
|
"grad_norm": 0.0541340846516407, |
|
"learning_rate": 0.00014446351791849276, |
|
"loss": 0.0408, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.8380809595202399, |
|
"grad_norm": 0.06569812050813671, |
|
"learning_rate": 0.00014422886902190014, |
|
"loss": 0.0559, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.8395802098950524, |
|
"grad_norm": 0.07697796259986985, |
|
"learning_rate": 0.00014399391698559152, |
|
"loss": 0.0359, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.841079460269865, |
|
"grad_norm": 0.043380007173197774, |
|
"learning_rate": 0.00014375866341990187, |
|
"loss": 0.0307, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.8425787106446777, |
|
"grad_norm": 0.06012928930562709, |
|
"learning_rate": 0.00014352310993723277, |
|
"loss": 0.0359, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.8440779610194903, |
|
"grad_norm": 0.05916041881920503, |
|
"learning_rate": 0.00014328725815204144, |
|
"loss": 0.0451, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.8455772113943029, |
|
"grad_norm": 0.04584559728661184, |
|
"learning_rate": 0.00014305110968082952, |
|
"loss": 0.0295, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.8470764617691154, |
|
"grad_norm": 0.048484148060844064, |
|
"learning_rate": 0.0001428146661421323, |
|
"loss": 0.0413, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.848575712143928, |
|
"grad_norm": 0.051087086585204396, |
|
"learning_rate": 0.00014257792915650728, |
|
"loss": 0.0377, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.8500749625187406, |
|
"grad_norm": 0.04873404112533848, |
|
"learning_rate": 0.00014234090034652324, |
|
"loss": 0.0426, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.8515742128935532, |
|
"grad_norm": 0.05766048192964452, |
|
"learning_rate": 0.00014210358133674912, |
|
"loss": 0.033, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.8530734632683659, |
|
"grad_norm": 0.054308917320710616, |
|
"learning_rate": 0.0001418659737537428, |
|
"loss": 0.0379, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.8545727136431784, |
|
"grad_norm": 0.0460844103779593, |
|
"learning_rate": 0.00014162807922604012, |
|
"loss": 0.0289, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.856071964017991, |
|
"grad_norm": 0.04438781665413021, |
|
"learning_rate": 0.00014138989938414348, |
|
"loss": 0.0265, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.8575712143928036, |
|
"grad_norm": 0.1327712159880055, |
|
"learning_rate": 0.00014115143586051088, |
|
"loss": 0.0676, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.8590704647676162, |
|
"grad_norm": 0.050894530499255325, |
|
"learning_rate": 0.00014091269028954458, |
|
"loss": 0.0438, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.8605697151424287, |
|
"grad_norm": 0.05468020442615518, |
|
"learning_rate": 0.00014067366430758004, |
|
"loss": 0.042, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.8620689655172413, |
|
"grad_norm": 0.09435379755279395, |
|
"learning_rate": 0.00014043435955287452, |
|
"loss": 0.0599, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.863568215892054, |
|
"grad_norm": 0.046063192994359826, |
|
"learning_rate": 0.00014019477766559604, |
|
"loss": 0.0348, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.8650674662668666, |
|
"grad_norm": 0.052806710216832004, |
|
"learning_rate": 0.00013995492028781202, |
|
"loss": 0.0432, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.8665667166416792, |
|
"grad_norm": 0.05319984444801645, |
|
"learning_rate": 0.00013971478906347806, |
|
"loss": 0.0362, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.8680659670164917, |
|
"grad_norm": 0.04452467143786472, |
|
"learning_rate": 0.0001394743856384267, |
|
"loss": 0.0314, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.8695652173913043, |
|
"grad_norm": 0.05329898478614524, |
|
"learning_rate": 0.00013923371166035616, |
|
"loss": 0.0464, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.8710644677661169, |
|
"grad_norm": 0.060886833574884516, |
|
"learning_rate": 0.00013899276877881884, |
|
"loss": 0.059, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.8725637181409296, |
|
"grad_norm": 0.05294256278470512, |
|
"learning_rate": 0.0001387515586452103, |
|
"loss": 0.046, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.8740629685157422, |
|
"grad_norm": 0.07449191398493356, |
|
"learning_rate": 0.00013851008291275782, |
|
"loss": 0.0343, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.8755622188905547, |
|
"grad_norm": 0.06943351016979123, |
|
"learning_rate": 0.000138268343236509, |
|
"loss": 0.0558, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.8770614692653673, |
|
"grad_norm": 0.06652499644935615, |
|
"learning_rate": 0.0001380263412733205, |
|
"loss": 0.0451, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.8785607196401799, |
|
"grad_norm": 0.05833246316842718, |
|
"learning_rate": 0.00013778407868184672, |
|
"loss": 0.0457, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.8800599700149925, |
|
"grad_norm": 0.04003719271959368, |
|
"learning_rate": 0.00013754155712252832, |
|
"loss": 0.0283, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.881559220389805, |
|
"grad_norm": 0.05607671087323687, |
|
"learning_rate": 0.0001372987782575809, |
|
"loss": 0.04, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.8830584707646177, |
|
"grad_norm": 0.040134500394464416, |
|
"learning_rate": 0.00013705574375098365, |
|
"loss": 0.0385, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.8845577211394303, |
|
"grad_norm": 0.06555335932088245, |
|
"learning_rate": 0.00013681245526846783, |
|
"loss": 0.072, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.8860569715142429, |
|
"grad_norm": 0.05066618534250665, |
|
"learning_rate": 0.00013656891447750544, |
|
"loss": 0.0428, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.8875562218890555, |
|
"grad_norm": 0.04391780563799797, |
|
"learning_rate": 0.00013632512304729785, |
|
"loss": 0.0367, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.889055472263868, |
|
"grad_norm": 0.06116296036386768, |
|
"learning_rate": 0.0001360810826487642, |
|
"loss": 0.0462, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.8905547226386806, |
|
"grad_norm": 0.053934832910471985, |
|
"learning_rate": 0.00013583679495453, |
|
"loss": 0.046, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.8920539730134932, |
|
"grad_norm": 0.045163829899199405, |
|
"learning_rate": 0.0001355922616389159, |
|
"loss": 0.0362, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.8935532233883059, |
|
"grad_norm": 0.08655848208121547, |
|
"learning_rate": 0.00013534748437792573, |
|
"loss": 0.0406, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.8950524737631185, |
|
"grad_norm": 0.05391530518298683, |
|
"learning_rate": 0.00013510246484923547, |
|
"loss": 0.0293, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.896551724137931, |
|
"grad_norm": 0.04845707242796894, |
|
"learning_rate": 0.00013485720473218154, |
|
"loss": 0.0299, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.8980509745127436, |
|
"grad_norm": 0.04916815685149742, |
|
"learning_rate": 0.0001346117057077493, |
|
"loss": 0.0386, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.8995502248875562, |
|
"grad_norm": 0.06617662307537282, |
|
"learning_rate": 0.00013436596945856164, |
|
"loss": 0.0501, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.9010494752623688, |
|
"grad_norm": 0.03991924636314799, |
|
"learning_rate": 0.0001341199976688672, |
|
"loss": 0.0282, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.9025487256371814, |
|
"grad_norm": 0.050388728813716206, |
|
"learning_rate": 0.00013387379202452917, |
|
"loss": 0.0352, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.904047976011994, |
|
"grad_norm": 0.03893791860248331, |
|
"learning_rate": 0.0001336273542130134, |
|
"loss": 0.021, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.9055472263868066, |
|
"grad_norm": 0.059548986139220876, |
|
"learning_rate": 0.0001333806859233771, |
|
"loss": 0.0798, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.9070464767616192, |
|
"grad_norm": 0.04599439337421781, |
|
"learning_rate": 0.0001331337888462571, |
|
"loss": 0.0351, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.9085457271364318, |
|
"grad_norm": 0.05203471422944932, |
|
"learning_rate": 0.00013288666467385833, |
|
"loss": 0.0295, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.9100449775112444, |
|
"grad_norm": 0.055165249926359204, |
|
"learning_rate": 0.0001326393150999422, |
|
"loss": 0.0345, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.9115442278860569, |
|
"grad_norm": 0.13139481496590286, |
|
"learning_rate": 0.00013239174181981495, |
|
"loss": 0.0534, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.9130434782608695, |
|
"grad_norm": 0.04500674501515753, |
|
"learning_rate": 0.00013214394653031616, |
|
"loss": 0.0203, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.9145427286356822, |
|
"grad_norm": 0.05584447753073881, |
|
"learning_rate": 0.00013189593092980702, |
|
"loss": 0.0344, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.9160419790104948, |
|
"grad_norm": 0.041591690138825445, |
|
"learning_rate": 0.00013164769671815862, |
|
"loss": 0.0201, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.9175412293853074, |
|
"grad_norm": 0.05297004561152026, |
|
"learning_rate": 0.00013139924559674052, |
|
"loss": 0.0381, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.9190404797601199, |
|
"grad_norm": 0.04848305915244054, |
|
"learning_rate": 0.00013115057926840884, |
|
"loss": 0.0349, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.9205397301349325, |
|
"grad_norm": 0.03324583015768157, |
|
"learning_rate": 0.00013090169943749476, |
|
"loss": 0.028, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.9220389805097451, |
|
"grad_norm": 0.06071140630728308, |
|
"learning_rate": 0.0001306526078097928, |
|
"loss": 0.0407, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.9235382308845578, |
|
"grad_norm": 0.055677239200326835, |
|
"learning_rate": 0.00013040330609254903, |
|
"loss": 0.0362, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.9250374812593704, |
|
"grad_norm": 0.05138799744779293, |
|
"learning_rate": 0.00013015379599444957, |
|
"loss": 0.0549, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.9265367316341829, |
|
"grad_norm": 0.053161721439074654, |
|
"learning_rate": 0.00012990407922560868, |
|
"loss": 0.0468, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.9280359820089955, |
|
"grad_norm": 0.0638192639644002, |
|
"learning_rate": 0.00012965415749755709, |
|
"loss": 0.0518, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.9295352323838081, |
|
"grad_norm": 0.047780862073805754, |
|
"learning_rate": 0.0001294040325232304, |
|
"loss": 0.0462, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.9310344827586207, |
|
"grad_norm": 0.04020955585417902, |
|
"learning_rate": 0.00012915370601695715, |
|
"loss": 0.0331, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.9325337331334332, |
|
"grad_norm": 0.037639677657712055, |
|
"learning_rate": 0.00012890317969444716, |
|
"loss": 0.0248, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.9340329835082459, |
|
"grad_norm": 0.05345077130247162, |
|
"learning_rate": 0.00012865245527277986, |
|
"loss": 0.0478, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.9355322338830585, |
|
"grad_norm": 0.03711053033129508, |
|
"learning_rate": 0.00012840153447039228, |
|
"loss": 0.0199, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.9370314842578711, |
|
"grad_norm": 0.04911139204212449, |
|
"learning_rate": 0.00012815041900706754, |
|
"loss": 0.0301, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.9385307346326837, |
|
"grad_norm": 0.07752327109727443, |
|
"learning_rate": 0.00012789911060392294, |
|
"loss": 0.0692, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.9400299850074962, |
|
"grad_norm": 0.13232474472513436, |
|
"learning_rate": 0.0001276476109833981, |
|
"loss": 0.0634, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.9415292353823088, |
|
"grad_norm": 0.061926243742957965, |
|
"learning_rate": 0.00012739592186924328, |
|
"loss": 0.0397, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.9430284857571214, |
|
"grad_norm": 0.04356242072040339, |
|
"learning_rate": 0.00012714404498650743, |
|
"loss": 0.0297, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.9445277361319341, |
|
"grad_norm": 0.0376190098528388, |
|
"learning_rate": 0.00012689198206152657, |
|
"loss": 0.0271, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.9460269865067467, |
|
"grad_norm": 0.05455511757321388, |
|
"learning_rate": 0.00012663973482191177, |
|
"loss": 0.0482, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.9475262368815592, |
|
"grad_norm": 0.04442045716631641, |
|
"learning_rate": 0.0001263873049965373, |
|
"loss": 0.0291, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.9490254872563718, |
|
"grad_norm": 0.054450506231905636, |
|
"learning_rate": 0.00012613469431552895, |
|
"loss": 0.0428, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.9505247376311844, |
|
"grad_norm": 0.044327380794719165, |
|
"learning_rate": 0.00012588190451025207, |
|
"loss": 0.0286, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.952023988005997, |
|
"grad_norm": 0.047204720536083225, |
|
"learning_rate": 0.00012562893731329967, |
|
"loss": 0.0406, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.9535232383808095, |
|
"grad_norm": 0.19462933782113725, |
|
"learning_rate": 0.00012537579445848058, |
|
"loss": 0.1233, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.9550224887556222, |
|
"grad_norm": 0.06994302712351169, |
|
"learning_rate": 0.00012512247768080756, |
|
"loss": 0.0403, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.9565217391304348, |
|
"grad_norm": 0.06646995144905621, |
|
"learning_rate": 0.0001248689887164855, |
|
"loss": 0.0394, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.9580209895052474, |
|
"grad_norm": 0.06551860726050548, |
|
"learning_rate": 0.00012461532930289933, |
|
"loss": 0.0613, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.95952023988006, |
|
"grad_norm": 0.04577051878676127, |
|
"learning_rate": 0.00012436150117860225, |
|
"loss": 0.0267, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.9610194902548725, |
|
"grad_norm": 0.0501593890528279, |
|
"learning_rate": 0.00012410750608330388, |
|
"loss": 0.0415, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.9625187406296851, |
|
"grad_norm": 0.04213838081722136, |
|
"learning_rate": 0.0001238533457578581, |
|
"loss": 0.0363, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.9640179910044977, |
|
"grad_norm": 0.04618274978280509, |
|
"learning_rate": 0.00012359902194425133, |
|
"loss": 0.0268, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.9655172413793104, |
|
"grad_norm": 0.054945133222623746, |
|
"learning_rate": 0.00012334453638559057, |
|
"loss": 0.0411, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.967016491754123, |
|
"grad_norm": 0.07629757994067972, |
|
"learning_rate": 0.00012308989082609122, |
|
"loss": 0.0363, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.9685157421289355, |
|
"grad_norm": 0.040863705327469, |
|
"learning_rate": 0.00012283508701106557, |
|
"loss": 0.0302, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.9700149925037481, |
|
"grad_norm": 0.07100180343789458, |
|
"learning_rate": 0.0001225801266869104, |
|
"loss": 0.044, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.9715142428785607, |
|
"grad_norm": 0.06298768167998063, |
|
"learning_rate": 0.00012232501160109514, |
|
"loss": 0.0396, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.9730134932533733, |
|
"grad_norm": 0.05510372575164882, |
|
"learning_rate": 0.00012206974350215015, |
|
"loss": 0.044, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.974512743628186, |
|
"grad_norm": 0.0635198930062088, |
|
"learning_rate": 0.00012181432413965428, |
|
"loss": 0.0552, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.9760119940029985, |
|
"grad_norm": 0.06043612042070538, |
|
"learning_rate": 0.00012155875526422332, |
|
"loss": 0.0617, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.9775112443778111, |
|
"grad_norm": 0.040855526708167285, |
|
"learning_rate": 0.00012130303862749767, |
|
"loss": 0.0268, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.9790104947526237, |
|
"grad_norm": 0.0496157050205137, |
|
"learning_rate": 0.00012104717598213056, |
|
"loss": 0.0225, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.9805097451274363, |
|
"grad_norm": 0.04029704523356512, |
|
"learning_rate": 0.00012079116908177593, |
|
"loss": 0.0339, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.9820089955022488, |
|
"grad_norm": 0.04963288277577189, |
|
"learning_rate": 0.00012053501968107641, |
|
"loss": 0.0346, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.9835082458770614, |
|
"grad_norm": 0.06455026512135428, |
|
"learning_rate": 0.00012027872953565125, |
|
"loss": 0.0545, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.9850074962518741, |
|
"grad_norm": 0.044681736834349894, |
|
"learning_rate": 0.00012002230040208447, |
|
"loss": 0.0377, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.9865067466266867, |
|
"grad_norm": 0.06016026349629435, |
|
"learning_rate": 0.00011976573403791262, |
|
"loss": 0.0439, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.9880059970014993, |
|
"grad_norm": 0.03916055819189204, |
|
"learning_rate": 0.00011950903220161285, |
|
"loss": 0.0319, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.9895052473763118, |
|
"grad_norm": 0.0695326756638391, |
|
"learning_rate": 0.00011925219665259075, |
|
"loss": 0.0406, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.9910044977511244, |
|
"grad_norm": 0.05484556659479051, |
|
"learning_rate": 0.00011899522915116848, |
|
"loss": 0.0413, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.992503748125937, |
|
"grad_norm": 0.06761504502922643, |
|
"learning_rate": 0.00011873813145857249, |
|
"loss": 0.0502, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.9940029985007496, |
|
"grad_norm": 0.03769561443583786, |
|
"learning_rate": 0.00011848090533692155, |
|
"loss": 0.0351, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.9955022488755623, |
|
"grad_norm": 0.05884790570998015, |
|
"learning_rate": 0.00011822355254921478, |
|
"loss": 0.0499, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.9970014992503748, |
|
"grad_norm": 0.05692462358820348, |
|
"learning_rate": 0.00011796607485931928, |
|
"loss": 0.0367, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.9985007496251874, |
|
"grad_norm": 0.04183931910506595, |
|
"learning_rate": 0.00011770847403195834, |
|
"loss": 0.0362, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.05719242886094664, |
|
"learning_rate": 0.0001174507518326992, |
|
"loss": 0.0373, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.0589199960231781, |
|
"eval_runtime": 760.1358, |
|
"eval_samples_per_second": 13.65, |
|
"eval_steps_per_second": 0.854, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.0014992503748126, |
|
"grad_norm": 0.03762475183995106, |
|
"learning_rate": 0.00011719291002794096, |
|
"loss": 0.0313, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.0029985007496252, |
|
"grad_norm": 0.027248795473336453, |
|
"learning_rate": 0.00011693495038490245, |
|
"loss": 0.0202, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.0044977511244377, |
|
"grad_norm": 0.041298897947596655, |
|
"learning_rate": 0.00011667687467161024, |
|
"loss": 0.0318, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.0059970014992503, |
|
"grad_norm": 0.041340873666624624, |
|
"learning_rate": 0.0001164186846568863, |
|
"loss": 0.0192, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.0074962518740629, |
|
"grad_norm": 0.03416279632561669, |
|
"learning_rate": 0.00011616038211033613, |
|
"loss": 0.0194, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.0089955022488755, |
|
"grad_norm": 0.04269382987865334, |
|
"learning_rate": 0.00011590196880233644, |
|
"loss": 0.0306, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.0104947526236883, |
|
"grad_norm": 0.04456460780740358, |
|
"learning_rate": 0.0001156434465040231, |
|
"loss": 0.0358, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.0119940029985008, |
|
"grad_norm": 0.037486633011275576, |
|
"learning_rate": 0.00011538481698727904, |
|
"loss": 0.03, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.0134932533733134, |
|
"grad_norm": 0.07944497636169431, |
|
"learning_rate": 0.00011512608202472194, |
|
"loss": 0.047, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.014992503748126, |
|
"grad_norm": 0.07167365891576202, |
|
"learning_rate": 0.00011486724338969232, |
|
"loss": 0.0281, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.0164917541229386, |
|
"grad_norm": 0.030600334090124463, |
|
"learning_rate": 0.00011460830285624118, |
|
"loss": 0.0185, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.0179910044977512, |
|
"grad_norm": 0.0334860041071292, |
|
"learning_rate": 0.00011434926219911793, |
|
"loss": 0.0205, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.0194902548725637, |
|
"grad_norm": 0.07452867174589614, |
|
"learning_rate": 0.00011409012319375827, |
|
"loss": 0.024, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.0209895052473763, |
|
"grad_norm": 0.05352241311204091, |
|
"learning_rate": 0.00011383088761627192, |
|
"loss": 0.0153, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.0224887556221889, |
|
"grad_norm": 0.05264974398072599, |
|
"learning_rate": 0.00011357155724343045, |
|
"loss": 0.0297, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.0239880059970015, |
|
"grad_norm": 0.043453829569375796, |
|
"learning_rate": 0.00011331213385265524, |
|
"loss": 0.0285, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.025487256371814, |
|
"grad_norm": 0.04694358278440352, |
|
"learning_rate": 0.00011305261922200519, |
|
"loss": 0.0308, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.0269865067466266, |
|
"grad_norm": 0.052332835831569786, |
|
"learning_rate": 0.00011279301513016444, |
|
"loss": 0.0276, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.0284857571214392, |
|
"grad_norm": 0.05731903299379123, |
|
"learning_rate": 0.00011253332335643043, |
|
"loss": 0.0368, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.0299850074962518, |
|
"grad_norm": 0.03577714273632661, |
|
"learning_rate": 0.0001122735456807015, |
|
"loss": 0.0159, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.0314842578710646, |
|
"grad_norm": 0.030987774585815354, |
|
"learning_rate": 0.00011201368388346471, |
|
"loss": 0.014, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.0329835082458771, |
|
"grad_norm": 0.04126960363600113, |
|
"learning_rate": 0.00011175373974578378, |
|
"loss": 0.0224, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.0344827586206897, |
|
"grad_norm": 0.03027314685021926, |
|
"learning_rate": 0.00011149371504928668, |
|
"loss": 0.0187, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.0359820089955023, |
|
"grad_norm": 0.059016723534456986, |
|
"learning_rate": 0.00011123361157615355, |
|
"loss": 0.0336, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.0374812593703149, |
|
"grad_norm": 0.03460241895483622, |
|
"learning_rate": 0.00011097343110910452, |
|
"loss": 0.0172, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.0389805097451275, |
|
"grad_norm": 0.04905984564105954, |
|
"learning_rate": 0.00011071317543138738, |
|
"loss": 0.0202, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.04047976011994, |
|
"grad_norm": 0.0477670312888383, |
|
"learning_rate": 0.00011045284632676536, |
|
"loss": 0.0257, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.0419790104947526, |
|
"grad_norm": 0.0649026742045147, |
|
"learning_rate": 0.000110192445579505, |
|
"loss": 0.0528, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.0434782608695652, |
|
"grad_norm": 0.03838342057763881, |
|
"learning_rate": 0.00010993197497436391, |
|
"loss": 0.0176, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.0449775112443778, |
|
"grad_norm": 0.05081060598139383, |
|
"learning_rate": 0.00010967143629657842, |
|
"loss": 0.024, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.0464767616191903, |
|
"grad_norm": 0.07793509916227749, |
|
"learning_rate": 0.00010941083133185146, |
|
"loss": 0.0475, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.047976011994003, |
|
"grad_norm": 0.045160852102844276, |
|
"learning_rate": 0.00010915016186634026, |
|
"loss": 0.0169, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.0494752623688155, |
|
"grad_norm": 0.08881360543272801, |
|
"learning_rate": 0.00010888942968664417, |
|
"loss": 0.023, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.050974512743628, |
|
"grad_norm": 0.038412054850676614, |
|
"learning_rate": 0.00010862863657979237, |
|
"loss": 0.018, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.0524737631184409, |
|
"grad_norm": 0.051809463503864034, |
|
"learning_rate": 0.00010836778433323158, |
|
"loss": 0.0266, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.0539730134932535, |
|
"grad_norm": 0.059635918308077476, |
|
"learning_rate": 0.00010810687473481389, |
|
"loss": 0.0293, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.055472263868066, |
|
"grad_norm": 0.05223829787695676, |
|
"learning_rate": 0.0001078459095727845, |
|
"loss": 0.0333, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.0569715142428786, |
|
"grad_norm": 0.04788415350708223, |
|
"learning_rate": 0.00010758489063576939, |
|
"loss": 0.0225, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.0584707646176912, |
|
"grad_norm": 0.07606032889772184, |
|
"learning_rate": 0.00010732381971276318, |
|
"loss": 0.0504, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.0599700149925038, |
|
"grad_norm": 0.041830044788424676, |
|
"learning_rate": 0.00010706269859311669, |
|
"loss": 0.0339, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.0614692653673163, |
|
"grad_norm": 0.056499034345126806, |
|
"learning_rate": 0.00010680152906652483, |
|
"loss": 0.0797, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.062968515742129, |
|
"grad_norm": 0.050667538240860205, |
|
"learning_rate": 0.00010654031292301432, |
|
"loss": 0.0385, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.0644677661169415, |
|
"grad_norm": 0.03606037951763869, |
|
"learning_rate": 0.00010627905195293135, |
|
"loss": 0.0249, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.065967016491754, |
|
"grad_norm": 0.06482813154069911, |
|
"learning_rate": 0.00010601774794692935, |
|
"loss": 0.0322, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.0674662668665666, |
|
"grad_norm": 0.031472797559762344, |
|
"learning_rate": 0.00010575640269595675, |
|
"loss": 0.0163, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.0689655172413792, |
|
"grad_norm": 0.04865531174873738, |
|
"learning_rate": 0.0001054950179912446, |
|
"loss": 0.0265, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.0704647676161918, |
|
"grad_norm": 0.035242178613106155, |
|
"learning_rate": 0.0001052335956242944, |
|
"loss": 0.017, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.0719640179910046, |
|
"grad_norm": 0.10969495785314283, |
|
"learning_rate": 0.0001049721373868658, |
|
"loss": 0.051, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.0734632683658172, |
|
"grad_norm": 0.055243920108870075, |
|
"learning_rate": 0.00010471064507096426, |
|
"loss": 0.0299, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.0749625187406298, |
|
"grad_norm": 0.027766012133857647, |
|
"learning_rate": 0.00010444912046882888, |
|
"loss": 0.0121, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.0764617691154423, |
|
"grad_norm": 0.05149363616690094, |
|
"learning_rate": 0.00010418756537291996, |
|
"loss": 0.0237, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.077961019490255, |
|
"grad_norm": 0.0435854981748732, |
|
"learning_rate": 0.00010392598157590688, |
|
"loss": 0.0216, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.0794602698650675, |
|
"grad_norm": 0.048516448094552064, |
|
"learning_rate": 0.00010366437087065564, |
|
"loss": 0.03, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.08095952023988, |
|
"grad_norm": 0.05499780130973038, |
|
"learning_rate": 0.00010340273505021674, |
|
"loss": 0.0263, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.0824587706146926, |
|
"grad_norm": 0.06511989459756257, |
|
"learning_rate": 0.00010314107590781284, |
|
"loss": 0.0374, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.0839580209895052, |
|
"grad_norm": 0.05279506351690483, |
|
"learning_rate": 0.00010287939523682634, |
|
"loss": 0.026, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.0854572713643178, |
|
"grad_norm": 0.049766335190166044, |
|
"learning_rate": 0.00010261769483078733, |
|
"loss": 0.0362, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.0869565217391304, |
|
"grad_norm": 0.0616925740357659, |
|
"learning_rate": 0.00010235597648336104, |
|
"loss": 0.0272, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.088455772113943, |
|
"grad_norm": 0.035580584914971236, |
|
"learning_rate": 0.0001020942419883357, |
|
"loss": 0.0246, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.0899550224887555, |
|
"grad_norm": 0.05651314515303107, |
|
"learning_rate": 0.0001018324931396103, |
|
"loss": 0.0389, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.0914542728635683, |
|
"grad_norm": 0.03403275870606777, |
|
"learning_rate": 0.00010157073173118208, |
|
"loss": 0.017, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.092953523238381, |
|
"grad_norm": 0.04001508080874846, |
|
"learning_rate": 0.00010130895955713445, |
|
"loss": 0.0344, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.0944527736131935, |
|
"grad_norm": 0.038898635048905054, |
|
"learning_rate": 0.00010104717841162458, |
|
"loss": 0.0139, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.095952023988006, |
|
"grad_norm": 0.04852969796297334, |
|
"learning_rate": 0.00010078539008887114, |
|
"loss": 0.0303, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.0974512743628186, |
|
"grad_norm": 0.030415262260471615, |
|
"learning_rate": 0.00010052359638314195, |
|
"loss": 0.0209, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.0989505247376312, |
|
"grad_norm": 0.03296035338712638, |
|
"learning_rate": 0.00010026179908874183, |
|
"loss": 0.0176, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.1004497751124438, |
|
"grad_norm": 0.06912112785698081, |
|
"learning_rate": 0.0001, |
|
"loss": 0.0911, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.1019490254872564, |
|
"grad_norm": 0.04126417647100554, |
|
"learning_rate": 9.973820091125821e-05, |
|
"loss": 0.0278, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.103448275862069, |
|
"grad_norm": 0.06337110286285316, |
|
"learning_rate": 9.947640361685804e-05, |
|
"loss": 0.0423, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.1049475262368815, |
|
"grad_norm": 0.04785214604294027, |
|
"learning_rate": 9.921460991112891e-05, |
|
"loss": 0.019, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.106446776611694, |
|
"grad_norm": 0.03813515686538096, |
|
"learning_rate": 9.895282158837545e-05, |
|
"loss": 0.0147, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.1079460269865067, |
|
"grad_norm": 0.04010157831438066, |
|
"learning_rate": 9.869104044286558e-05, |
|
"loss": 0.0276, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.1094452773613193, |
|
"grad_norm": 0.03406965983248011, |
|
"learning_rate": 9.842926826881796e-05, |
|
"loss": 0.0149, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.1109445277361318, |
|
"grad_norm": 0.05122540759813054, |
|
"learning_rate": 9.816750686038973e-05, |
|
"loss": 0.0218, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.1124437781109444, |
|
"grad_norm": 0.046947765546721, |
|
"learning_rate": 9.790575801166432e-05, |
|
"loss": 0.025, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.1139430284857572, |
|
"grad_norm": 0.07498886528603434, |
|
"learning_rate": 9.764402351663901e-05, |
|
"loss": 0.0434, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.1154422788605698, |
|
"grad_norm": 0.03187418225048272, |
|
"learning_rate": 9.73823051692127e-05, |
|
"loss": 0.0164, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.1169415292353824, |
|
"grad_norm": 0.05124246170818971, |
|
"learning_rate": 9.712060476317367e-05, |
|
"loss": 0.027, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.118440779610195, |
|
"grad_norm": 0.04064336939991715, |
|
"learning_rate": 9.685892409218717e-05, |
|
"loss": 0.0312, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.1199400299850075, |
|
"grad_norm": 0.02940931109265308, |
|
"learning_rate": 9.659726494978325e-05, |
|
"loss": 0.0138, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.12143928035982, |
|
"grad_norm": 0.07160694268820639, |
|
"learning_rate": 9.633562912934436e-05, |
|
"loss": 0.0638, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.1229385307346327, |
|
"grad_norm": 0.03268947669426935, |
|
"learning_rate": 9.607401842409317e-05, |
|
"loss": 0.0152, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.1244377811094453, |
|
"grad_norm": 0.04469947279909564, |
|
"learning_rate": 9.581243462708006e-05, |
|
"loss": 0.0519, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.1259370314842578, |
|
"grad_norm": 0.03733568199384118, |
|
"learning_rate": 9.555087953117114e-05, |
|
"loss": 0.0191, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.1274362818590704, |
|
"grad_norm": 0.04957459904827682, |
|
"learning_rate": 9.528935492903575e-05, |
|
"loss": 0.0258, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.128935532233883, |
|
"grad_norm": 0.04291232620005789, |
|
"learning_rate": 9.502786261313423e-05, |
|
"loss": 0.0186, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.1304347826086956, |
|
"grad_norm": 0.03645205093124093, |
|
"learning_rate": 9.476640437570562e-05, |
|
"loss": 0.0179, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.1319340329835081, |
|
"grad_norm": 0.036883531854657765, |
|
"learning_rate": 9.450498200875546e-05, |
|
"loss": 0.0181, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.133433283358321, |
|
"grad_norm": 0.03611209493776261, |
|
"learning_rate": 9.424359730404329e-05, |
|
"loss": 0.0149, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.1349325337331335, |
|
"grad_norm": 0.03543885857842899, |
|
"learning_rate": 9.398225205307066e-05, |
|
"loss": 0.0179, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.136431784107946, |
|
"grad_norm": 0.04885756178865602, |
|
"learning_rate": 9.372094804706867e-05, |
|
"loss": 0.0363, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.1379310344827587, |
|
"grad_norm": 0.045201667917713226, |
|
"learning_rate": 9.345968707698569e-05, |
|
"loss": 0.015, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.1394302848575713, |
|
"grad_norm": 0.045725248124270736, |
|
"learning_rate": 9.319847093347522e-05, |
|
"loss": 0.0242, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.1409295352323838, |
|
"grad_norm": 0.03246760399537495, |
|
"learning_rate": 9.293730140688336e-05, |
|
"loss": 0.0136, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.1424287856071964, |
|
"grad_norm": 0.04842555683642214, |
|
"learning_rate": 9.267618028723686e-05, |
|
"loss": 0.0178, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.143928035982009, |
|
"grad_norm": 0.09178292202784064, |
|
"learning_rate": 9.241510936423062e-05, |
|
"loss": 0.0293, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.1454272863568216, |
|
"grad_norm": 0.03732797810025069, |
|
"learning_rate": 9.215409042721552e-05, |
|
"loss": 0.032, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.1469265367316341, |
|
"grad_norm": 0.06084284216603883, |
|
"learning_rate": 9.189312526518613e-05, |
|
"loss": 0.0298, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.1484257871064467, |
|
"grad_norm": 0.05070843744675052, |
|
"learning_rate": 9.163221566676847e-05, |
|
"loss": 0.0115, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.1499250374812593, |
|
"grad_norm": 0.040941353399746576, |
|
"learning_rate": 9.137136342020768e-05, |
|
"loss": 0.0219, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.1514242878560719, |
|
"grad_norm": 0.056138839618842994, |
|
"learning_rate": 9.111057031335585e-05, |
|
"loss": 0.0209, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.1529235382308847, |
|
"grad_norm": 0.053017337582656095, |
|
"learning_rate": 9.084983813365978e-05, |
|
"loss": 0.0139, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.1544227886056972, |
|
"grad_norm": 0.0437160406734324, |
|
"learning_rate": 9.058916866814858e-05, |
|
"loss": 0.0292, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.1559220389805098, |
|
"grad_norm": 0.06517357369678477, |
|
"learning_rate": 9.032856370342158e-05, |
|
"loss": 0.0314, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.1574212893553224, |
|
"grad_norm": 0.05616399006235695, |
|
"learning_rate": 9.006802502563612e-05, |
|
"loss": 0.0251, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.158920539730135, |
|
"grad_norm": 0.046115228142336546, |
|
"learning_rate": 8.980755442049502e-05, |
|
"loss": 0.0198, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.1604197901049476, |
|
"grad_norm": 0.03835516395560399, |
|
"learning_rate": 8.954715367323468e-05, |
|
"loss": 0.0264, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.1619190404797601, |
|
"grad_norm": 0.05643079022619911, |
|
"learning_rate": 8.928682456861265e-05, |
|
"loss": 0.0255, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.1634182908545727, |
|
"grad_norm": 0.03736421317747863, |
|
"learning_rate": 8.902656889089548e-05, |
|
"loss": 0.0185, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.1649175412293853, |
|
"grad_norm": 0.051100575541428585, |
|
"learning_rate": 8.876638842384644e-05, |
|
"loss": 0.0201, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.1664167916041979, |
|
"grad_norm": 0.03430703468298361, |
|
"learning_rate": 8.850628495071336e-05, |
|
"loss": 0.0152, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.1679160419790104, |
|
"grad_norm": 0.030283377580051823, |
|
"learning_rate": 8.824626025421626e-05, |
|
"loss": 0.0189, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.169415292353823, |
|
"grad_norm": 0.04693707293564595, |
|
"learning_rate": 8.79863161165353e-05, |
|
"loss": 0.0265, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.1709145427286356, |
|
"grad_norm": 0.0670635890351688, |
|
"learning_rate": 8.772645431929851e-05, |
|
"loss": 0.043, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.1724137931034484, |
|
"grad_norm": 0.043848670372825894, |
|
"learning_rate": 8.746667664356956e-05, |
|
"loss": 0.0217, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.1739130434782608, |
|
"grad_norm": 0.05412472149445996, |
|
"learning_rate": 8.720698486983557e-05, |
|
"loss": 0.0197, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.1754122938530736, |
|
"grad_norm": 0.019649617759211542, |
|
"learning_rate": 8.694738077799488e-05, |
|
"loss": 0.0073, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.1769115442278861, |
|
"grad_norm": 0.05225640536933059, |
|
"learning_rate": 8.668786614734478e-05, |
|
"loss": 0.0276, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.1784107946026987, |
|
"grad_norm": 0.04315118247949963, |
|
"learning_rate": 8.642844275656957e-05, |
|
"loss": 0.0324, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.1799100449775113, |
|
"grad_norm": 0.029412653837457406, |
|
"learning_rate": 8.616911238372812e-05, |
|
"loss": 0.014, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.1814092953523239, |
|
"grad_norm": 0.04133482214444302, |
|
"learning_rate": 8.590987680624174e-05, |
|
"loss": 0.0202, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.1829085457271364, |
|
"grad_norm": 0.06765748401829244, |
|
"learning_rate": 8.565073780088208e-05, |
|
"loss": 0.0441, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.184407796101949, |
|
"grad_norm": 0.05271546437062786, |
|
"learning_rate": 8.539169714375885e-05, |
|
"loss": 0.0256, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.1859070464767616, |
|
"grad_norm": 0.040199086711978846, |
|
"learning_rate": 8.51327566103077e-05, |
|
"loss": 0.0139, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.1874062968515742, |
|
"grad_norm": 0.04400948652648687, |
|
"learning_rate": 8.487391797527808e-05, |
|
"loss": 0.0298, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.1889055472263867, |
|
"grad_norm": 0.04023525299067876, |
|
"learning_rate": 8.461518301272099e-05, |
|
"loss": 0.025, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.1904047976011993, |
|
"grad_norm": 0.03582585876670016, |
|
"learning_rate": 8.435655349597689e-05, |
|
"loss": 0.0206, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.191904047976012, |
|
"grad_norm": 0.04826655494275668, |
|
"learning_rate": 8.409803119766361e-05, |
|
"loss": 0.0242, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.1934032983508245, |
|
"grad_norm": 0.05078096150994414, |
|
"learning_rate": 8.383961788966391e-05, |
|
"loss": 0.0212, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.1949025487256373, |
|
"grad_norm": 0.04974688478126328, |
|
"learning_rate": 8.358131534311372e-05, |
|
"loss": 0.0262, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.1964017991004499, |
|
"grad_norm": 0.03592037995042853, |
|
"learning_rate": 8.332312532838978e-05, |
|
"loss": 0.0159, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.1979010494752624, |
|
"grad_norm": 0.0491861501338674, |
|
"learning_rate": 8.306504961509754e-05, |
|
"loss": 0.0234, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.199400299850075, |
|
"grad_norm": 0.027759792015070672, |
|
"learning_rate": 8.280708997205904e-05, |
|
"loss": 0.0136, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.2008995502248876, |
|
"grad_norm": 0.0548378934219744, |
|
"learning_rate": 8.254924816730082e-05, |
|
"loss": 0.0251, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.2023988005997002, |
|
"grad_norm": 0.031042646875865575, |
|
"learning_rate": 8.229152596804168e-05, |
|
"loss": 0.0114, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.2038980509745127, |
|
"grad_norm": 0.04185558020402479, |
|
"learning_rate": 8.203392514068074e-05, |
|
"loss": 0.0196, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.2053973013493253, |
|
"grad_norm": 0.03573774524393669, |
|
"learning_rate": 8.177644745078526e-05, |
|
"loss": 0.029, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.206896551724138, |
|
"grad_norm": 0.04500572749598161, |
|
"learning_rate": 8.151909466307845e-05, |
|
"loss": 0.0283, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.2083958020989505, |
|
"grad_norm": 0.055472853240349465, |
|
"learning_rate": 8.126186854142752e-05, |
|
"loss": 0.0278, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.209895052473763, |
|
"grad_norm": 0.04737096525754299, |
|
"learning_rate": 8.100477084883156e-05, |
|
"loss": 0.0212, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.2113943028485756, |
|
"grad_norm": 0.06377978614115022, |
|
"learning_rate": 8.074780334740928e-05, |
|
"loss": 0.0264, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.2128935532233882, |
|
"grad_norm": 0.1037734762277636, |
|
"learning_rate": 8.049096779838719e-05, |
|
"loss": 0.0653, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.214392803598201, |
|
"grad_norm": 0.04938456491428775, |
|
"learning_rate": 8.023426596208739e-05, |
|
"loss": 0.0359, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.2158920539730136, |
|
"grad_norm": 0.03871245605295673, |
|
"learning_rate": 7.997769959791554e-05, |
|
"loss": 0.0189, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.2173913043478262, |
|
"grad_norm": 0.04836114902803376, |
|
"learning_rate": 7.972127046434878e-05, |
|
"loss": 0.0216, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.2188905547226387, |
|
"grad_norm": 0.06566867333027651, |
|
"learning_rate": 7.946498031892364e-05, |
|
"loss": 0.0289, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.2203898050974513, |
|
"grad_norm": 0.07837421636117799, |
|
"learning_rate": 7.920883091822408e-05, |
|
"loss": 0.0312, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.221889055472264, |
|
"grad_norm": 0.05650900430884517, |
|
"learning_rate": 7.895282401786945e-05, |
|
"loss": 0.025, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.2233883058470765, |
|
"grad_norm": 0.04878788564349848, |
|
"learning_rate": 7.869696137250235e-05, |
|
"loss": 0.0268, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.224887556221889, |
|
"grad_norm": 0.048966519858149045, |
|
"learning_rate": 7.844124473577672e-05, |
|
"loss": 0.0315, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.2263868065967016, |
|
"grad_norm": 0.03824407231228468, |
|
"learning_rate": 7.818567586034577e-05, |
|
"loss": 0.0198, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.2278860569715142, |
|
"grad_norm": 0.0529744116915096, |
|
"learning_rate": 7.79302564978499e-05, |
|
"loss": 0.0381, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.2293853073463268, |
|
"grad_norm": 0.05426023606207575, |
|
"learning_rate": 7.767498839890488e-05, |
|
"loss": 0.0295, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.2308845577211394, |
|
"grad_norm": 0.03534659066797283, |
|
"learning_rate": 7.741987331308964e-05, |
|
"loss": 0.0197, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.232383808095952, |
|
"grad_norm": 0.15173262879838179, |
|
"learning_rate": 7.716491298893442e-05, |
|
"loss": 0.0299, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.2338830584707647, |
|
"grad_norm": 0.04753989562431895, |
|
"learning_rate": 7.691010917390874e-05, |
|
"loss": 0.021, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.235382308845577, |
|
"grad_norm": 0.045421468563024, |
|
"learning_rate": 7.66554636144095e-05, |
|
"loss": 0.0315, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.23688155922039, |
|
"grad_norm": 0.047073846935776716, |
|
"learning_rate": 7.64009780557487e-05, |
|
"loss": 0.0214, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.2383808095952025, |
|
"grad_norm": 0.05224603067353402, |
|
"learning_rate": 7.614665424214193e-05, |
|
"loss": 0.0217, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.239880059970015, |
|
"grad_norm": 0.04520300461414657, |
|
"learning_rate": 7.589249391669616e-05, |
|
"loss": 0.0213, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.2413793103448276, |
|
"grad_norm": 0.04736894258409455, |
|
"learning_rate": 7.563849882139776e-05, |
|
"loss": 0.0237, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.2428785607196402, |
|
"grad_norm": 0.053766180923070186, |
|
"learning_rate": 7.53846706971007e-05, |
|
"loss": 0.0323, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.2443778110944528, |
|
"grad_norm": 0.05528412145749755, |
|
"learning_rate": 7.513101128351454e-05, |
|
"loss": 0.0487, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.2458770614692654, |
|
"grad_norm": 0.05566014947606353, |
|
"learning_rate": 7.487752231919245e-05, |
|
"loss": 0.0267, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.247376311844078, |
|
"grad_norm": 0.04083106652022665, |
|
"learning_rate": 7.462420554151944e-05, |
|
"loss": 0.0278, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.2488755622188905, |
|
"grad_norm": 0.06891562106271396, |
|
"learning_rate": 7.437106268670034e-05, |
|
"loss": 0.0294, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.250374812593703, |
|
"grad_norm": 0.07342638900103611, |
|
"learning_rate": 7.411809548974792e-05, |
|
"loss": 0.0845, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.2518740629685157, |
|
"grad_norm": 0.050466963864324595, |
|
"learning_rate": 7.386530568447107e-05, |
|
"loss": 0.0298, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.2533733133433285, |
|
"grad_norm": 0.06686910207648407, |
|
"learning_rate": 7.361269500346274e-05, |
|
"loss": 0.0275, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.2548725637181408, |
|
"grad_norm": 0.046075881059396175, |
|
"learning_rate": 7.336026517808827e-05, |
|
"loss": 0.0214, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.2563718140929536, |
|
"grad_norm": 0.049483313231029974, |
|
"learning_rate": 7.310801793847344e-05, |
|
"loss": 0.0295, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.2578710644677662, |
|
"grad_norm": 0.06374973164823877, |
|
"learning_rate": 7.285595501349258e-05, |
|
"loss": 0.0544, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.2593703148425788, |
|
"grad_norm": 0.2122029055345762, |
|
"learning_rate": 7.260407813075676e-05, |
|
"loss": 0.099, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.2608695652173914, |
|
"grad_norm": 0.02852293268003914, |
|
"learning_rate": 7.235238901660195e-05, |
|
"loss": 0.0166, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.262368815592204, |
|
"grad_norm": 0.04115201623040188, |
|
"learning_rate": 7.210088939607708e-05, |
|
"loss": 0.0254, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.2638680659670165, |
|
"grad_norm": 0.05628533766124335, |
|
"learning_rate": 7.184958099293246e-05, |
|
"loss": 0.0365, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.265367316341829, |
|
"grad_norm": 0.08094057068973667, |
|
"learning_rate": 7.159846552960774e-05, |
|
"loss": 0.0222, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.2668665667166417, |
|
"grad_norm": 0.03799623111533529, |
|
"learning_rate": 7.134754472722017e-05, |
|
"loss": 0.0224, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.2683658170914542, |
|
"grad_norm": 0.04700845879809844, |
|
"learning_rate": 7.109682030555283e-05, |
|
"loss": 0.0224, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.2698650674662668, |
|
"grad_norm": 0.048949756388667436, |
|
"learning_rate": 7.08462939830429e-05, |
|
"loss": 0.018, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.2713643178410794, |
|
"grad_norm": 0.047029812926556, |
|
"learning_rate": 7.059596747676962e-05, |
|
"loss": 0.0266, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.272863568215892, |
|
"grad_norm": 0.030351552812581083, |
|
"learning_rate": 7.034584250244291e-05, |
|
"loss": 0.0151, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.2743628185907045, |
|
"grad_norm": 0.052275221441315155, |
|
"learning_rate": 7.009592077439134e-05, |
|
"loss": 0.0198, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.2758620689655173, |
|
"grad_norm": 0.04376783013783442, |
|
"learning_rate": 6.984620400555044e-05, |
|
"loss": 0.0167, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.2773613193403297, |
|
"grad_norm": 0.054785397673240276, |
|
"learning_rate": 6.959669390745097e-05, |
|
"loss": 0.0209, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.2788605697151425, |
|
"grad_norm": 0.04062686089747498, |
|
"learning_rate": 6.934739219020726e-05, |
|
"loss": 0.015, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.280359820089955, |
|
"grad_norm": 0.06936614542121676, |
|
"learning_rate": 6.909830056250527e-05, |
|
"loss": 0.0272, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.2818590704647677, |
|
"grad_norm": 0.06026615690676764, |
|
"learning_rate": 6.88494207315912e-05, |
|
"loss": 0.0271, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.2833583208395802, |
|
"grad_norm": 0.07021407853748118, |
|
"learning_rate": 6.860075440325951e-05, |
|
"loss": 0.0308, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.2848575712143928, |
|
"grad_norm": 0.052574527846576825, |
|
"learning_rate": 6.835230328184138e-05, |
|
"loss": 0.0175, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.2863568215892054, |
|
"grad_norm": 0.05298669412741438, |
|
"learning_rate": 6.8104069070193e-05, |
|
"loss": 0.0176, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.287856071964018, |
|
"grad_norm": 0.04213736228746085, |
|
"learning_rate": 6.785605346968386e-05, |
|
"loss": 0.0175, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.2893553223388305, |
|
"grad_norm": 0.03297170359235013, |
|
"learning_rate": 6.760825818018508e-05, |
|
"loss": 0.0293, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.2908545727136431, |
|
"grad_norm": 0.07532993960457951, |
|
"learning_rate": 6.736068490005783e-05, |
|
"loss": 0.0361, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.2923538230884557, |
|
"grad_norm": 0.06949245335822227, |
|
"learning_rate": 6.711333532614168e-05, |
|
"loss": 0.0428, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.2938530734632683, |
|
"grad_norm": 0.033794502875320105, |
|
"learning_rate": 6.68662111537429e-05, |
|
"loss": 0.019, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.295352323838081, |
|
"grad_norm": 0.042862124096238796, |
|
"learning_rate": 6.661931407662292e-05, |
|
"loss": 0.0165, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.2968515742128934, |
|
"grad_norm": 0.03762469114322251, |
|
"learning_rate": 6.637264578698664e-05, |
|
"loss": 0.0177, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.2983508245877062, |
|
"grad_norm": 0.06681907271371797, |
|
"learning_rate": 6.612620797547087e-05, |
|
"loss": 0.0425, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.2998500749625188, |
|
"grad_norm": 0.06461028945930743, |
|
"learning_rate": 6.588000233113282e-05, |
|
"loss": 0.033, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.3013493253373314, |
|
"grad_norm": 0.03602981021835219, |
|
"learning_rate": 6.56340305414384e-05, |
|
"loss": 0.0185, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.302848575712144, |
|
"grad_norm": 0.07041331964732868, |
|
"learning_rate": 6.538829429225069e-05, |
|
"loss": 0.0375, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.3043478260869565, |
|
"grad_norm": 0.053845021312289056, |
|
"learning_rate": 6.51427952678185e-05, |
|
"loss": 0.0264, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.3058470764617691, |
|
"grad_norm": 0.03459818578421578, |
|
"learning_rate": 6.489753515076456e-05, |
|
"loss": 0.0145, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.3073463268365817, |
|
"grad_norm": 0.04351524337603825, |
|
"learning_rate": 6.465251562207431e-05, |
|
"loss": 0.0207, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.3088455772113943, |
|
"grad_norm": 0.04456815741914844, |
|
"learning_rate": 6.440773836108413e-05, |
|
"loss": 0.0278, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.3103448275862069, |
|
"grad_norm": 0.061834383611745806, |
|
"learning_rate": 6.416320504546997e-05, |
|
"loss": 0.0332, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.3118440779610194, |
|
"grad_norm": 0.045688059219150534, |
|
"learning_rate": 6.391891735123582e-05, |
|
"loss": 0.0435, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.313343328335832, |
|
"grad_norm": 0.0499097200922593, |
|
"learning_rate": 6.367487695270217e-05, |
|
"loss": 0.0225, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.3148425787106448, |
|
"grad_norm": 0.03930515307322255, |
|
"learning_rate": 6.343108552249457e-05, |
|
"loss": 0.0213, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.3163418290854572, |
|
"grad_norm": 0.04750578901510908, |
|
"learning_rate": 6.318754473153221e-05, |
|
"loss": 0.0344, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.31784107946027, |
|
"grad_norm": 0.05610290200169092, |
|
"learning_rate": 6.294425624901638e-05, |
|
"loss": 0.0322, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.3193403298350825, |
|
"grad_norm": 0.029473494282681995, |
|
"learning_rate": 6.27012217424191e-05, |
|
"loss": 0.0134, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.3208395802098951, |
|
"grad_norm": 0.05781224980544649, |
|
"learning_rate": 6.245844287747168e-05, |
|
"loss": 0.0354, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.3223388305847077, |
|
"grad_norm": 0.06267338674733103, |
|
"learning_rate": 6.22159213181533e-05, |
|
"loss": 0.0361, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.3238380809595203, |
|
"grad_norm": 0.038066306071301584, |
|
"learning_rate": 6.197365872667952e-05, |
|
"loss": 0.0181, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.3253373313343328, |
|
"grad_norm": 0.07878387260295196, |
|
"learning_rate": 6.173165676349103e-05, |
|
"loss": 0.0682, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.3268365817091454, |
|
"grad_norm": 0.04524683438782359, |
|
"learning_rate": 6.14899170872422e-05, |
|
"loss": 0.0295, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.328335832083958, |
|
"grad_norm": 0.034393149243530124, |
|
"learning_rate": 6.12484413547897e-05, |
|
"loss": 0.0129, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.3298350824587706, |
|
"grad_norm": 0.04589569278929691, |
|
"learning_rate": 6.1007231221181206e-05, |
|
"loss": 0.0214, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.3313343328335832, |
|
"grad_norm": 0.0999532474797914, |
|
"learning_rate": 6.076628833964388e-05, |
|
"loss": 0.0241, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.3328335832083957, |
|
"grad_norm": 0.06084071636014458, |
|
"learning_rate": 6.052561436157329e-05, |
|
"loss": 0.0242, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.3343328335832085, |
|
"grad_norm": 0.04917934868115785, |
|
"learning_rate": 6.0285210936521955e-05, |
|
"loss": 0.0197, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.3358320839580209, |
|
"grad_norm": 0.069354361338318, |
|
"learning_rate": 6.0045079712188014e-05, |
|
"loss": 0.0275, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.3373313343328337, |
|
"grad_norm": 0.048448533551978, |
|
"learning_rate": 5.9805222334404e-05, |
|
"loss": 0.0167, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.338830584707646, |
|
"grad_norm": 0.052410701670247924, |
|
"learning_rate": 5.956564044712551e-05, |
|
"loss": 0.0207, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.3403298350824588, |
|
"grad_norm": 0.042448711771357485, |
|
"learning_rate": 5.9326335692419995e-05, |
|
"loss": 0.0192, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.3418290854572714, |
|
"grad_norm": 0.05234893125921864, |
|
"learning_rate": 5.908730971045542e-05, |
|
"loss": 0.0317, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.343328335832084, |
|
"grad_norm": 0.04331314810299154, |
|
"learning_rate": 5.884856413948913e-05, |
|
"loss": 0.0249, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.3448275862068966, |
|
"grad_norm": 0.05916456115329105, |
|
"learning_rate": 5.8610100615856524e-05, |
|
"loss": 0.0462, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.3463268365817092, |
|
"grad_norm": 0.062115100364669586, |
|
"learning_rate": 5.83719207739599e-05, |
|
"loss": 0.0256, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.3478260869565217, |
|
"grad_norm": 0.035880225141386626, |
|
"learning_rate": 5.8134026246257225e-05, |
|
"loss": 0.0191, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.3493253373313343, |
|
"grad_norm": 0.0573517254033885, |
|
"learning_rate": 5.789641866325091e-05, |
|
"loss": 0.0262, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.3508245877061469, |
|
"grad_norm": 0.060303599881880644, |
|
"learning_rate": 5.76590996534768e-05, |
|
"loss": 0.0278, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.3523238380809595, |
|
"grad_norm": 0.09628521288179971, |
|
"learning_rate": 5.7422070843492734e-05, |
|
"loss": 0.0356, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.353823088455772, |
|
"grad_norm": 0.03617057375661126, |
|
"learning_rate": 5.718533385786772e-05, |
|
"loss": 0.02, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.3553223388305846, |
|
"grad_norm": 0.044174128599893586, |
|
"learning_rate": 5.694889031917047e-05, |
|
"loss": 0.022, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.3568215892053974, |
|
"grad_norm": 0.06221884291802812, |
|
"learning_rate": 5.671274184795865e-05, |
|
"loss": 0.0233, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.3583208395802098, |
|
"grad_norm": 0.0711661250860426, |
|
"learning_rate": 5.647689006276726e-05, |
|
"loss": 0.0449, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.3598200899550226, |
|
"grad_norm": 0.03490331687529149, |
|
"learning_rate": 5.624133658009817e-05, |
|
"loss": 0.0192, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.3613193403298351, |
|
"grad_norm": 0.06280166629494677, |
|
"learning_rate": 5.6006083014408484e-05, |
|
"loss": 0.0257, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.3628185907046477, |
|
"grad_norm": 0.049297420747513795, |
|
"learning_rate": 5.577113097809989e-05, |
|
"loss": 0.0287, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.3643178410794603, |
|
"grad_norm": 0.04383360423444908, |
|
"learning_rate": 5.553648208150728e-05, |
|
"loss": 0.0159, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.3658170914542729, |
|
"grad_norm": 0.06660129279871392, |
|
"learning_rate": 5.53021379328879e-05, |
|
"loss": 0.0316, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.3673163418290855, |
|
"grad_norm": 0.030935221991709352, |
|
"learning_rate": 5.506810013841036e-05, |
|
"loss": 0.0141, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.368815592203898, |
|
"grad_norm": 0.06598665791404248, |
|
"learning_rate": 5.483437030214343e-05, |
|
"loss": 0.0238, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.3703148425787106, |
|
"grad_norm": 0.05185709390744075, |
|
"learning_rate": 5.4600950026045326e-05, |
|
"loss": 0.0268, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.3718140929535232, |
|
"grad_norm": 0.06112957554123978, |
|
"learning_rate": 5.436784090995239e-05, |
|
"loss": 0.0384, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.3733133433283358, |
|
"grad_norm": 0.02981664047266952, |
|
"learning_rate": 5.4135044551568546e-05, |
|
"loss": 0.0179, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.3748125937031483, |
|
"grad_norm": 0.03404310873892391, |
|
"learning_rate": 5.390256254645378e-05, |
|
"loss": 0.0133, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.3763118440779611, |
|
"grad_norm": 0.02748593423758078, |
|
"learning_rate": 5.3670396488013854e-05, |
|
"loss": 0.01, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.3778110944527735, |
|
"grad_norm": 0.044108908050431356, |
|
"learning_rate": 5.343854796748886e-05, |
|
"loss": 0.016, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.3793103448275863, |
|
"grad_norm": 0.04555240266490368, |
|
"learning_rate": 5.320701857394268e-05, |
|
"loss": 0.0242, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.3808095952023989, |
|
"grad_norm": 0.025073591589012855, |
|
"learning_rate": 5.297580989425177e-05, |
|
"loss": 0.0102, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.3823088455772115, |
|
"grad_norm": 0.03799373887428538, |
|
"learning_rate": 5.274492351309461e-05, |
|
"loss": 0.0199, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.383808095952024, |
|
"grad_norm": 0.0634147924268712, |
|
"learning_rate": 5.251436101294056e-05, |
|
"loss": 0.0261, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.3853073463268366, |
|
"grad_norm": 0.05771328219018494, |
|
"learning_rate": 5.2284123974039154e-05, |
|
"loss": 0.0283, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.3868065967016492, |
|
"grad_norm": 0.04619876933398743, |
|
"learning_rate": 5.205421397440935e-05, |
|
"loss": 0.0218, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.3883058470764618, |
|
"grad_norm": 0.05211266829503708, |
|
"learning_rate": 5.182463258982846e-05, |
|
"loss": 0.0273, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.3898050974512743, |
|
"grad_norm": 0.0405536781310804, |
|
"learning_rate": 5.159538139382167e-05, |
|
"loss": 0.0163, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.391304347826087, |
|
"grad_norm": 0.05228157708952931, |
|
"learning_rate": 5.1366461957650954e-05, |
|
"loss": 0.0148, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.3928035982008995, |
|
"grad_norm": 0.051865023170566474, |
|
"learning_rate": 5.113787585030454e-05, |
|
"loss": 0.0253, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.394302848575712, |
|
"grad_norm": 0.08414586524418576, |
|
"learning_rate": 5.090962463848592e-05, |
|
"loss": 0.0256, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.3958020989505249, |
|
"grad_norm": 0.04333393205488659, |
|
"learning_rate": 5.068170988660346e-05, |
|
"loss": 0.0155, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.3973013493253372, |
|
"grad_norm": 0.033783013394866904, |
|
"learning_rate": 5.045413315675924e-05, |
|
"loss": 0.0178, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.39880059970015, |
|
"grad_norm": 0.041794554720093746, |
|
"learning_rate": 5.0226896008738786e-05, |
|
"loss": 0.0134, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.4002998500749624, |
|
"grad_norm": 0.04766610890656114, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 0.0275, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.4017991004497752, |
|
"grad_norm": 0.03981235051153443, |
|
"learning_rate": 4.977344668566275e-05, |
|
"loss": 0.0171, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.4032983508245878, |
|
"grad_norm": 0.050230959070964275, |
|
"learning_rate": 4.9547237618498085e-05, |
|
"loss": 0.0247, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.4047976011994003, |
|
"grad_norm": 0.04796385344740444, |
|
"learning_rate": 4.9321374348917585e-05, |
|
"loss": 0.0199, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.406296851574213, |
|
"grad_norm": 0.03650112855229006, |
|
"learning_rate": 4.909585842496287e-05, |
|
"loss": 0.0197, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.4077961019490255, |
|
"grad_norm": 0.058389964584798164, |
|
"learning_rate": 4.887069139229481e-05, |
|
"loss": 0.0305, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.409295352323838, |
|
"grad_norm": 0.03657890885550993, |
|
"learning_rate": 4.864587479418302e-05, |
|
"loss": 0.0172, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.4107946026986506, |
|
"grad_norm": 0.039034241468565725, |
|
"learning_rate": 4.842141017149526e-05, |
|
"loss": 0.0289, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.4122938530734632, |
|
"grad_norm": 0.05779842475140324, |
|
"learning_rate": 4.8197299062686995e-05, |
|
"loss": 0.0194, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.4137931034482758, |
|
"grad_norm": 0.04423489804284638, |
|
"learning_rate": 4.79735430037906e-05, |
|
"loss": 0.0167, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.4152923538230884, |
|
"grad_norm": 0.06732550480304075, |
|
"learning_rate": 4.7750143528405126e-05, |
|
"loss": 0.0234, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.416791604197901, |
|
"grad_norm": 0.048358500900887474, |
|
"learning_rate": 4.752710216768551e-05, |
|
"loss": 0.0126, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.4182908545727138, |
|
"grad_norm": 0.0618555856514782, |
|
"learning_rate": 4.7304420450332244e-05, |
|
"loss": 0.0531, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.419790104947526, |
|
"grad_norm": 0.021844937288201197, |
|
"learning_rate": 4.708209990258095e-05, |
|
"loss": 0.0092, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.421289355322339, |
|
"grad_norm": 0.043762847247643975, |
|
"learning_rate": 4.68601420481917e-05, |
|
"loss": 0.0186, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.4227886056971515, |
|
"grad_norm": 0.08941262921253182, |
|
"learning_rate": 4.6638548408438856e-05, |
|
"loss": 0.033, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.424287856071964, |
|
"grad_norm": 0.07137680432326722, |
|
"learning_rate": 4.6417320502100316e-05, |
|
"loss": 0.019, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.4257871064467766, |
|
"grad_norm": 0.05581448919899184, |
|
"learning_rate": 4.619645984544752e-05, |
|
"loss": 0.0175, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.4272863568215892, |
|
"grad_norm": 0.029522071224618904, |
|
"learning_rate": 4.59759679522345e-05, |
|
"loss": 0.0191, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.4287856071964018, |
|
"grad_norm": 0.03786913455549227, |
|
"learning_rate": 4.575584633368815e-05, |
|
"loss": 0.0197, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.4302848575712144, |
|
"grad_norm": 0.059428163125673714, |
|
"learning_rate": 4.5536096498497295e-05, |
|
"loss": 0.0296, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.431784107946027, |
|
"grad_norm": 0.03276712389103159, |
|
"learning_rate": 4.5316719952802776e-05, |
|
"loss": 0.0206, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.4332833583208395, |
|
"grad_norm": 0.0411950835656051, |
|
"learning_rate": 4.5097718200186814e-05, |
|
"loss": 0.0147, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.434782608695652, |
|
"grad_norm": 0.03726680229656808, |
|
"learning_rate": 4.4879092741663e-05, |
|
"loss": 0.0135, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.4362818590704647, |
|
"grad_norm": 0.051339763257998224, |
|
"learning_rate": 4.46608450756656e-05, |
|
"loss": 0.0294, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.4377811094452775, |
|
"grad_norm": 0.06516988257924636, |
|
"learning_rate": 4.444297669803981e-05, |
|
"loss": 0.0377, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.4392803598200898, |
|
"grad_norm": 0.04522777428706767, |
|
"learning_rate": 4.4225489102030995e-05, |
|
"loss": 0.0164, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.4407796101949026, |
|
"grad_norm": 0.057957991417301825, |
|
"learning_rate": 4.400838377827483e-05, |
|
"loss": 0.0292, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.4422788605697152, |
|
"grad_norm": 0.04382325188747058, |
|
"learning_rate": 4.379166221478697e-05, |
|
"loss": 0.013, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.4437781109445278, |
|
"grad_norm": 0.045850857607972945, |
|
"learning_rate": 4.35753258969526e-05, |
|
"loss": 0.0292, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.4452773613193404, |
|
"grad_norm": 0.052319076851113866, |
|
"learning_rate": 4.335937630751674e-05, |
|
"loss": 0.0203, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.446776611694153, |
|
"grad_norm": 0.04430763147098959, |
|
"learning_rate": 4.31438149265736e-05, |
|
"loss": 0.0174, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.4482758620689655, |
|
"grad_norm": 0.037336734388071224, |
|
"learning_rate": 4.2928643231556844e-05, |
|
"loss": 0.0187, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.449775112443778, |
|
"grad_norm": 0.03488717623172854, |
|
"learning_rate": 4.271386269722909e-05, |
|
"loss": 0.0215, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.4512743628185907, |
|
"grad_norm": 0.11146756348958964, |
|
"learning_rate": 4.249947479567218e-05, |
|
"loss": 0.0632, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.4527736131934033, |
|
"grad_norm": 0.0723346597125285, |
|
"learning_rate": 4.228548099627665e-05, |
|
"loss": 0.022, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.4542728635682158, |
|
"grad_norm": 0.05077819533323603, |
|
"learning_rate": 4.207188276573214e-05, |
|
"loss": 0.0262, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.4557721139430284, |
|
"grad_norm": 0.04220137883748374, |
|
"learning_rate": 4.185868156801694e-05, |
|
"loss": 0.022, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.4572713643178412, |
|
"grad_norm": 0.04257073910089388, |
|
"learning_rate": 4.1645878864388266e-05, |
|
"loss": 0.0188, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.4587706146926536, |
|
"grad_norm": 0.07051244032099359, |
|
"learning_rate": 4.143347611337194e-05, |
|
"loss": 0.0229, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.4602698650674664, |
|
"grad_norm": 0.04924278467634398, |
|
"learning_rate": 4.12214747707527e-05, |
|
"loss": 0.02, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.461769115442279, |
|
"grad_norm": 0.04465214846691316, |
|
"learning_rate": 4.1009876289563976e-05, |
|
"loss": 0.0204, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.4632683658170915, |
|
"grad_norm": 0.04262641492883479, |
|
"learning_rate": 4.0798682120078044e-05, |
|
"loss": 0.0192, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.464767616191904, |
|
"grad_norm": 0.03647273601340537, |
|
"learning_rate": 4.058789370979615e-05, |
|
"loss": 0.0184, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.4662668665667167, |
|
"grad_norm": 0.04885135157810087, |
|
"learning_rate": 4.037751250343841e-05, |
|
"loss": 0.0303, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.4677661169415293, |
|
"grad_norm": 0.08387069269883196, |
|
"learning_rate": 4.01675399429341e-05, |
|
"loss": 0.0234, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.4692653673163418, |
|
"grad_norm": 0.038204055040209454, |
|
"learning_rate": 3.9957977467411615e-05, |
|
"loss": 0.0179, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.4707646176911544, |
|
"grad_norm": 0.03786765301339409, |
|
"learning_rate": 3.9748826513188686e-05, |
|
"loss": 0.0165, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.472263868065967, |
|
"grad_norm": 0.05231654154725003, |
|
"learning_rate": 3.954008851376252e-05, |
|
"loss": 0.0172, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.4737631184407796, |
|
"grad_norm": 0.03335030350491901, |
|
"learning_rate": 3.933176489980005e-05, |
|
"loss": 0.0106, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.4752623688155921, |
|
"grad_norm": 0.03979273183143895, |
|
"learning_rate": 3.9123857099127936e-05, |
|
"loss": 0.0212, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.4767616191904047, |
|
"grad_norm": 0.05391309807180965, |
|
"learning_rate": 3.891636653672302e-05, |
|
"loss": 0.0157, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.4782608695652173, |
|
"grad_norm": 0.03361291341690922, |
|
"learning_rate": 3.8709294634702376e-05, |
|
"loss": 0.0139, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.47976011994003, |
|
"grad_norm": 0.035585947784928226, |
|
"learning_rate": 3.850264281231357e-05, |
|
"loss": 0.0219, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.4812593703148424, |
|
"grad_norm": 0.03333439340678102, |
|
"learning_rate": 3.829641248592515e-05, |
|
"loss": 0.0129, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.4827586206896552, |
|
"grad_norm": 0.040345561974199404, |
|
"learning_rate": 3.8090605069016595e-05, |
|
"loss": 0.0234, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.4842578710644678, |
|
"grad_norm": 0.07403742521938174, |
|
"learning_rate": 3.788522197216897e-05, |
|
"loss": 0.02, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.4857571214392804, |
|
"grad_norm": 0.05711176669065304, |
|
"learning_rate": 3.7680264603054994e-05, |
|
"loss": 0.0397, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.487256371814093, |
|
"grad_norm": 0.06610246075931671, |
|
"learning_rate": 3.747573436642951e-05, |
|
"loss": 0.0277, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.4887556221889056, |
|
"grad_norm": 0.03232609404738584, |
|
"learning_rate": 3.7271632664119846e-05, |
|
"loss": 0.0091, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.4902548725637181, |
|
"grad_norm": 0.03787459790735476, |
|
"learning_rate": 3.7067960895016275e-05, |
|
"loss": 0.0209, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.4917541229385307, |
|
"grad_norm": 0.04786121404829169, |
|
"learning_rate": 3.686472045506223e-05, |
|
"loss": 0.013, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.4932533733133433, |
|
"grad_norm": 0.04350531988383918, |
|
"learning_rate": 3.6661912737245e-05, |
|
"loss": 0.0292, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.4947526236881559, |
|
"grad_norm": 0.03931742385537951, |
|
"learning_rate": 3.645953913158593e-05, |
|
"loss": 0.027, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.4962518740629684, |
|
"grad_norm": 0.041467982689201134, |
|
"learning_rate": 3.6257601025131026e-05, |
|
"loss": 0.0238, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.497751124437781, |
|
"grad_norm": 0.04379142918036115, |
|
"learning_rate": 3.6056099801941534e-05, |
|
"loss": 0.0192, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.4992503748125938, |
|
"grad_norm": 0.03952572570808585, |
|
"learning_rate": 3.585503684308421e-05, |
|
"loss": 0.0185, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.5007496251874062, |
|
"grad_norm": 0.04682486078750102, |
|
"learning_rate": 3.565441352662211e-05, |
|
"loss": 0.0172, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.502248875562219, |
|
"grad_norm": 0.05532999211305211, |
|
"learning_rate": 3.545423122760493e-05, |
|
"loss": 0.0277, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.5037481259370313, |
|
"grad_norm": 0.06235019222714452, |
|
"learning_rate": 3.525449131805982e-05, |
|
"loss": 0.0319, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.5052473763118441, |
|
"grad_norm": 0.06024901707215258, |
|
"learning_rate": 3.5055195166981645e-05, |
|
"loss": 0.0375, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.5067466266866567, |
|
"grad_norm": 0.0678387358265547, |
|
"learning_rate": 3.4856344140323985e-05, |
|
"loss": 0.0281, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.5082458770614693, |
|
"grad_norm": 0.054204263066234194, |
|
"learning_rate": 3.465793960098945e-05, |
|
"loss": 0.0434, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.5097451274362819, |
|
"grad_norm": 0.045371529886557356, |
|
"learning_rate": 3.445998290882062e-05, |
|
"loss": 0.0225, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.5112443778110944, |
|
"grad_norm": 0.12719949916957105, |
|
"learning_rate": 3.426247542059041e-05, |
|
"loss": 0.0482, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.512743628185907, |
|
"grad_norm": 0.039729045535986905, |
|
"learning_rate": 3.406541848999312e-05, |
|
"loss": 0.0135, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.5142428785607196, |
|
"grad_norm": 0.07776064383164068, |
|
"learning_rate": 3.386881346763483e-05, |
|
"loss": 0.0344, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.5157421289355324, |
|
"grad_norm": 0.031920259092681355, |
|
"learning_rate": 3.367266170102432e-05, |
|
"loss": 0.0123, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.5172413793103448, |
|
"grad_norm": 0.03993466785133325, |
|
"learning_rate": 3.347696453456393e-05, |
|
"loss": 0.0195, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.5187406296851576, |
|
"grad_norm": 0.03649095428535925, |
|
"learning_rate": 3.328172330954001e-05, |
|
"loss": 0.0127, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.52023988005997, |
|
"grad_norm": 0.05184896776912534, |
|
"learning_rate": 3.308693936411421e-05, |
|
"loss": 0.0389, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.5217391304347827, |
|
"grad_norm": 0.041105616161891526, |
|
"learning_rate": 3.289261403331372e-05, |
|
"loss": 0.0273, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.523238380809595, |
|
"grad_norm": 0.05332829161038912, |
|
"learning_rate": 3.269874864902269e-05, |
|
"loss": 0.0312, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.5247376311844079, |
|
"grad_norm": 0.029829522522220115, |
|
"learning_rate": 3.2505344539972705e-05, |
|
"loss": 0.0122, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.5262368815592204, |
|
"grad_norm": 0.05136530388164031, |
|
"learning_rate": 3.231240303173394e-05, |
|
"loss": 0.0128, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.527736131934033, |
|
"grad_norm": 0.026747951417845865, |
|
"learning_rate": 3.211992544670582e-05, |
|
"loss": 0.0139, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.5292353823088456, |
|
"grad_norm": 0.043634723111218424, |
|
"learning_rate": 3.192791310410822e-05, |
|
"loss": 0.0145, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.5307346326836582, |
|
"grad_norm": 0.032266616085346804, |
|
"learning_rate": 3.173636731997222e-05, |
|
"loss": 0.0136, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.5322338830584707, |
|
"grad_norm": 0.05249669591805282, |
|
"learning_rate": 3.154528940713113e-05, |
|
"loss": 0.0216, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.5337331334332833, |
|
"grad_norm": 0.04769500104005904, |
|
"learning_rate": 3.1354680675211635e-05, |
|
"loss": 0.0165, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.535232383808096, |
|
"grad_norm": 0.035229159668666236, |
|
"learning_rate": 3.116454243062459e-05, |
|
"loss": 0.0194, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.5367316341829085, |
|
"grad_norm": 0.05407112124934083, |
|
"learning_rate": 3.0974875976556284e-05, |
|
"loss": 0.0236, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.5382308845577213, |
|
"grad_norm": 0.025074397773416118, |
|
"learning_rate": 3.078568261295933e-05, |
|
"loss": 0.0141, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.5397301349325336, |
|
"grad_norm": 0.0445437291021861, |
|
"learning_rate": 3.059696363654386e-05, |
|
"loss": 0.0186, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.5412293853073464, |
|
"grad_norm": 0.0441384879409754, |
|
"learning_rate": 3.0408720340768572e-05, |
|
"loss": 0.0139, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.5427286356821588, |
|
"grad_norm": 0.0479088329800761, |
|
"learning_rate": 3.0220954015832003e-05, |
|
"loss": 0.0302, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.5442278860569716, |
|
"grad_norm": 0.034357781660932715, |
|
"learning_rate": 3.0033665948663448e-05, |
|
"loss": 0.0179, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.545727136431784, |
|
"grad_norm": 0.04351013918478569, |
|
"learning_rate": 2.9846857422914433e-05, |
|
"loss": 0.0237, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.5472263868065967, |
|
"grad_norm": 0.052463537989510735, |
|
"learning_rate": 2.9660529718949627e-05, |
|
"loss": 0.0264, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.5487256371814093, |
|
"grad_norm": 0.03561598179282845, |
|
"learning_rate": 2.9474684113838257e-05, |
|
"loss": 0.0199, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.550224887556222, |
|
"grad_norm": 0.03365987985086509, |
|
"learning_rate": 2.9289321881345254e-05, |
|
"loss": 0.0122, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.5517241379310345, |
|
"grad_norm": 0.043573488595870076, |
|
"learning_rate": 2.910444429192265e-05, |
|
"loss": 0.0203, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.553223388305847, |
|
"grad_norm": 0.03592975923317004, |
|
"learning_rate": 2.8920052612700754e-05, |
|
"loss": 0.0148, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.5547226386806596, |
|
"grad_norm": 0.03740369119589898, |
|
"learning_rate": 2.8736148107479467e-05, |
|
"loss": 0.0122, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.5562218890554722, |
|
"grad_norm": 0.05866623225512895, |
|
"learning_rate": 2.8552732036719687e-05, |
|
"loss": 0.026, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.557721139430285, |
|
"grad_norm": 0.037514551907470446, |
|
"learning_rate": 2.8369805657534575e-05, |
|
"loss": 0.0101, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.5592203898050974, |
|
"grad_norm": 0.06919743559839049, |
|
"learning_rate": 2.8187370223681132e-05, |
|
"loss": 0.0186, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.5607196401799102, |
|
"grad_norm": 0.10860031837919593, |
|
"learning_rate": 2.800542698555132e-05, |
|
"loss": 0.0331, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.5622188905547225, |
|
"grad_norm": 0.04053934454593709, |
|
"learning_rate": 2.7823977190163786e-05, |
|
"loss": 0.0304, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.5637181409295353, |
|
"grad_norm": 0.04589812353371874, |
|
"learning_rate": 2.764302208115508e-05, |
|
"loss": 0.0288, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.5652173913043477, |
|
"grad_norm": 0.051759071563514204, |
|
"learning_rate": 2.746256289877126e-05, |
|
"loss": 0.0221, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.5667166416791605, |
|
"grad_norm": 0.04419463652932288, |
|
"learning_rate": 2.728260087985933e-05, |
|
"loss": 0.021, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.568215892053973, |
|
"grad_norm": 0.044966668706100936, |
|
"learning_rate": 2.7103137257858868e-05, |
|
"loss": 0.0337, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.5697151424287856, |
|
"grad_norm": 0.04302495760541486, |
|
"learning_rate": 2.6924173262793383e-05, |
|
"loss": 0.0137, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.5712143928035982, |
|
"grad_norm": 0.08218579196147292, |
|
"learning_rate": 2.6745710121262136e-05, |
|
"loss": 0.0365, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.5727136431784108, |
|
"grad_norm": 0.061167493252746986, |
|
"learning_rate": 2.6567749056431467e-05, |
|
"loss": 0.0258, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.5742128935532234, |
|
"grad_norm": 0.13364423709623616, |
|
"learning_rate": 2.639029128802657e-05, |
|
"loss": 0.0626, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.575712143928036, |
|
"grad_norm": 0.03214675852865434, |
|
"learning_rate": 2.6213338032323176e-05, |
|
"loss": 0.0133, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.5772113943028487, |
|
"grad_norm": 0.06328373661244126, |
|
"learning_rate": 2.603689050213902e-05, |
|
"loss": 0.0243, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.578710644677661, |
|
"grad_norm": 0.04742194356052198, |
|
"learning_rate": 2.5860949906825795e-05, |
|
"loss": 0.0285, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.580209895052474, |
|
"grad_norm": 0.048588116559285674, |
|
"learning_rate": 2.5685517452260567e-05, |
|
"loss": 0.0307, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.5817091454272862, |
|
"grad_norm": 0.04993527437674772, |
|
"learning_rate": 2.5510594340837824e-05, |
|
"loss": 0.0232, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.583208395802099, |
|
"grad_norm": 0.043690403745079286, |
|
"learning_rate": 2.5336181771460876e-05, |
|
"loss": 0.0205, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.5847076461769114, |
|
"grad_norm": 0.07438950423299034, |
|
"learning_rate": 2.516228093953402e-05, |
|
"loss": 0.0306, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.5862068965517242, |
|
"grad_norm": 0.03036817361902045, |
|
"learning_rate": 2.4988893036954043e-05, |
|
"loss": 0.0148, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.5877061469265368, |
|
"grad_norm": 0.0330965964673103, |
|
"learning_rate": 2.4816019252102273e-05, |
|
"loss": 0.0195, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.5892053973013494, |
|
"grad_norm": 0.035050676413594786, |
|
"learning_rate": 2.464366076983623e-05, |
|
"loss": 0.0136, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.590704647676162, |
|
"grad_norm": 0.04101060544019866, |
|
"learning_rate": 2.4471818771481648e-05, |
|
"loss": 0.0141, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 1.5922038980509745, |
|
"grad_norm": 0.06280027292019925, |
|
"learning_rate": 2.4300494434824373e-05, |
|
"loss": 0.0224, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.593703148425787, |
|
"grad_norm": 0.04131360603164168, |
|
"learning_rate": 2.412968893410218e-05, |
|
"loss": 0.017, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 1.5952023988005997, |
|
"grad_norm": 0.03827142257211483, |
|
"learning_rate": 2.3959403439996907e-05, |
|
"loss": 0.0221, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.5967016491754122, |
|
"grad_norm": 0.06504367581713787, |
|
"learning_rate": 2.3789639119626216e-05, |
|
"loss": 0.0267, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.5982008995502248, |
|
"grad_norm": 0.06818969255684648, |
|
"learning_rate": 2.362039713653581e-05, |
|
"loss": 0.0228, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.5997001499250376, |
|
"grad_norm": 0.038392151336867984, |
|
"learning_rate": 2.34516786506912e-05, |
|
"loss": 0.0154, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 1.60119940029985, |
|
"grad_norm": 0.06573629069388562, |
|
"learning_rate": 2.328348481847006e-05, |
|
"loss": 0.0251, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.6026986506746628, |
|
"grad_norm": 0.06148804102636816, |
|
"learning_rate": 2.3115816792654056e-05, |
|
"loss": 0.0311, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 1.6041979010494751, |
|
"grad_norm": 0.04471759363710799, |
|
"learning_rate": 2.2948675722421086e-05, |
|
"loss": 0.0191, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.605697151424288, |
|
"grad_norm": 0.0415533636509639, |
|
"learning_rate": 2.2782062753337306e-05, |
|
"loss": 0.0184, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 1.6071964017991005, |
|
"grad_norm": 0.05794733183745205, |
|
"learning_rate": 2.2615979027349387e-05, |
|
"loss": 0.0255, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.608695652173913, |
|
"grad_norm": 0.0397678166261267, |
|
"learning_rate": 2.2450425682776565e-05, |
|
"loss": 0.0218, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 1.6101949025487257, |
|
"grad_norm": 0.04761970893285829, |
|
"learning_rate": 2.2285403854302912e-05, |
|
"loss": 0.0195, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.6116941529235382, |
|
"grad_norm": 0.054708750805224075, |
|
"learning_rate": 2.2120914672969606e-05, |
|
"loss": 0.0207, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.6131934032983508, |
|
"grad_norm": 0.04536721690981569, |
|
"learning_rate": 2.195695926616702e-05, |
|
"loss": 0.0115, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.6146926536731634, |
|
"grad_norm": 0.03693822651785582, |
|
"learning_rate": 2.1793538757627218e-05, |
|
"loss": 0.0269, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 1.616191904047976, |
|
"grad_norm": 0.05430171107827179, |
|
"learning_rate": 2.163065426741603e-05, |
|
"loss": 0.0234, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.6176911544227885, |
|
"grad_norm": 0.03499504155734339, |
|
"learning_rate": 2.146830691192553e-05, |
|
"loss": 0.0215, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 1.6191904047976013, |
|
"grad_norm": 0.041779929626990946, |
|
"learning_rate": 2.1306497803866277e-05, |
|
"loss": 0.0219, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.6206896551724137, |
|
"grad_norm": 0.05662420578938302, |
|
"learning_rate": 2.1145228052259824e-05, |
|
"loss": 0.0266, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 1.6221889055472265, |
|
"grad_norm": 0.0632358405041289, |
|
"learning_rate": 2.098449876243096e-05, |
|
"loss": 0.0513, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.6236881559220389, |
|
"grad_norm": 0.03370533424123817, |
|
"learning_rate": 2.0824311036000276e-05, |
|
"loss": 0.0164, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 1.6251874062968517, |
|
"grad_norm": 0.03757323666893171, |
|
"learning_rate": 2.0664665970876496e-05, |
|
"loss": 0.0158, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.626686656671664, |
|
"grad_norm": 0.03313883661650599, |
|
"learning_rate": 2.0505564661249e-05, |
|
"loss": 0.0164, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.6281859070464768, |
|
"grad_norm": 0.04129568741796651, |
|
"learning_rate": 2.0347008197580374e-05, |
|
"loss": 0.0262, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.6296851574212894, |
|
"grad_norm": 0.050633574006442895, |
|
"learning_rate": 2.0188997666598854e-05, |
|
"loss": 0.0263, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 1.631184407796102, |
|
"grad_norm": 0.062148357793015474, |
|
"learning_rate": 2.0031534151290943e-05, |
|
"loss": 0.0322, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.6326836581709145, |
|
"grad_norm": 0.045969526657587925, |
|
"learning_rate": 1.9874618730893946e-05, |
|
"loss": 0.0168, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.6341829085457271, |
|
"grad_norm": 0.04612858174233097, |
|
"learning_rate": 1.9718252480888566e-05, |
|
"loss": 0.0286, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.6356821589205397, |
|
"grad_norm": 0.040575925623268116, |
|
"learning_rate": 1.9562436472991552e-05, |
|
"loss": 0.019, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 1.6371814092953523, |
|
"grad_norm": 0.04417146500100262, |
|
"learning_rate": 1.9407171775148436e-05, |
|
"loss": 0.0202, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.638680659670165, |
|
"grad_norm": 0.036770078126215765, |
|
"learning_rate": 1.9252459451526007e-05, |
|
"loss": 0.0129, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 1.6401799100449774, |
|
"grad_norm": 0.0364801904957448, |
|
"learning_rate": 1.9098300562505266e-05, |
|
"loss": 0.015, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.6416791604197902, |
|
"grad_norm": 0.0254972319896911, |
|
"learning_rate": 1.8944696164673946e-05, |
|
"loss": 0.0139, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.6431784107946026, |
|
"grad_norm": 0.047093621027718184, |
|
"learning_rate": 1.879164731081937e-05, |
|
"loss": 0.0252, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.6446776611694154, |
|
"grad_norm": 0.04485052170740798, |
|
"learning_rate": 1.863915504992131e-05, |
|
"loss": 0.0202, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 1.6461769115442277, |
|
"grad_norm": 0.0387698802952675, |
|
"learning_rate": 1.848722042714457e-05, |
|
"loss": 0.0117, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.6476761619190405, |
|
"grad_norm": 0.0690313351636082, |
|
"learning_rate": 1.833584448383211e-05, |
|
"loss": 0.0225, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 1.6491754122938531, |
|
"grad_norm": 0.041762472042913064, |
|
"learning_rate": 1.818502825749764e-05, |
|
"loss": 0.0189, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.6506746626686657, |
|
"grad_norm": 0.043282701400431646, |
|
"learning_rate": 1.8034772781818775e-05, |
|
"loss": 0.0352, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 1.6521739130434783, |
|
"grad_norm": 0.05165034058427283, |
|
"learning_rate": 1.78850790866296e-05, |
|
"loss": 0.0225, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.6536731634182908, |
|
"grad_norm": 0.02717101883001689, |
|
"learning_rate": 1.773594819791403e-05, |
|
"loss": 0.0193, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 1.6551724137931034, |
|
"grad_norm": 0.04662587755796276, |
|
"learning_rate": 1.7587381137798432e-05, |
|
"loss": 0.0178, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.656671664167916, |
|
"grad_norm": 0.031822487513949387, |
|
"learning_rate": 1.7439378924544835e-05, |
|
"loss": 0.0189, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.6581709145427288, |
|
"grad_norm": 0.03280026362696025, |
|
"learning_rate": 1.7291942572543807e-05, |
|
"loss": 0.0123, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.6596701649175412, |
|
"grad_norm": 0.030180458183959057, |
|
"learning_rate": 1.714507309230766e-05, |
|
"loss": 0.0141, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.661169415292354, |
|
"grad_norm": 0.05658463863566536, |
|
"learning_rate": 1.6998771490463262e-05, |
|
"loss": 0.0647, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.6626686656671663, |
|
"grad_norm": 0.040781014716140526, |
|
"learning_rate": 1.6853038769745467e-05, |
|
"loss": 0.016, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 1.6641679160419791, |
|
"grad_norm": 0.04738429854592745, |
|
"learning_rate": 1.6707875928990058e-05, |
|
"loss": 0.03, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.6656671664167915, |
|
"grad_norm": 0.025248986437211247, |
|
"learning_rate": 1.656328396312682e-05, |
|
"loss": 0.0119, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 1.6671664167916043, |
|
"grad_norm": 0.08248141981476365, |
|
"learning_rate": 1.6419263863172997e-05, |
|
"loss": 0.0401, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.6686656671664168, |
|
"grad_norm": 0.040376633264695476, |
|
"learning_rate": 1.6275816616226113e-05, |
|
"loss": 0.0162, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 1.6701649175412294, |
|
"grad_norm": 0.04326524362791187, |
|
"learning_rate": 1.6132943205457606e-05, |
|
"loss": 0.0174, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.671664167916042, |
|
"grad_norm": 0.04807125621491688, |
|
"learning_rate": 1.599064461010581e-05, |
|
"loss": 0.0324, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.6731634182908546, |
|
"grad_norm": 0.03729442007384064, |
|
"learning_rate": 1.5848921805469397e-05, |
|
"loss": 0.0211, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.6746626686656672, |
|
"grad_norm": 0.029001614577088476, |
|
"learning_rate": 1.5707775762900547e-05, |
|
"loss": 0.0129, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 1.6761619190404797, |
|
"grad_norm": 0.030669347972443403, |
|
"learning_rate": 1.5567207449798515e-05, |
|
"loss": 0.01, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.6776611694152923, |
|
"grad_norm": 0.04829682712396594, |
|
"learning_rate": 1.542721782960268e-05, |
|
"loss": 0.0232, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 1.6791604197901049, |
|
"grad_norm": 0.04806923915457913, |
|
"learning_rate": 1.528780786178631e-05, |
|
"loss": 0.0202, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.6806596701649177, |
|
"grad_norm": 0.02440255176031142, |
|
"learning_rate": 1.5148978501849642e-05, |
|
"loss": 0.0128, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.68215892053973, |
|
"grad_norm": 0.04063406158834547, |
|
"learning_rate": 1.5010730701313625e-05, |
|
"loss": 0.0171, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.6836581709145428, |
|
"grad_norm": 0.04035147945576393, |
|
"learning_rate": 1.4873065407713149e-05, |
|
"loss": 0.031, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 1.6851574212893552, |
|
"grad_norm": 0.04027060084795129, |
|
"learning_rate": 1.4735983564590783e-05, |
|
"loss": 0.0182, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 1.686656671664168, |
|
"grad_norm": 0.045280521178547195, |
|
"learning_rate": 1.4599486111490102e-05, |
|
"loss": 0.0198, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.6881559220389803, |
|
"grad_norm": 0.04881923000165193, |
|
"learning_rate": 1.4463573983949341e-05, |
|
"loss": 0.0167, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 1.6896551724137931, |
|
"grad_norm": 0.03529851016922019, |
|
"learning_rate": 1.4328248113495047e-05, |
|
"loss": 0.0165, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 1.6911544227886057, |
|
"grad_norm": 0.04376517979604228, |
|
"learning_rate": 1.4193509427635543e-05, |
|
"loss": 0.0266, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 1.6926536731634183, |
|
"grad_norm": 0.03839629594972581, |
|
"learning_rate": 1.405935884985473e-05, |
|
"loss": 0.0109, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 1.6941529235382309, |
|
"grad_norm": 0.05394784900443594, |
|
"learning_rate": 1.3925797299605647e-05, |
|
"loss": 0.0289, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.6956521739130435, |
|
"grad_norm": 0.04473750186048263, |
|
"learning_rate": 1.379282569230419e-05, |
|
"loss": 0.0183, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 1.697151424287856, |
|
"grad_norm": 0.03971094977709959, |
|
"learning_rate": 1.3660444939322836e-05, |
|
"loss": 0.0131, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 1.6986506746626686, |
|
"grad_norm": 0.044781528569248615, |
|
"learning_rate": 1.3528655947984504e-05, |
|
"loss": 0.0606, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 1.7001499250374814, |
|
"grad_norm": 0.05687647115833445, |
|
"learning_rate": 1.339745962155613e-05, |
|
"loss": 0.0261, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 1.7016491754122938, |
|
"grad_norm": 0.051596153458091924, |
|
"learning_rate": 1.3266856859242705e-05, |
|
"loss": 0.0217, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.7031484257871066, |
|
"grad_norm": 0.05150733833063215, |
|
"learning_rate": 1.3136848556180892e-05, |
|
"loss": 0.0306, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 1.704647676161919, |
|
"grad_norm": 0.042545970063710185, |
|
"learning_rate": 1.3007435603433039e-05, |
|
"loss": 0.0195, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 1.7061469265367317, |
|
"grad_norm": 0.044603161701574794, |
|
"learning_rate": 1.2878618887981064e-05, |
|
"loss": 0.0241, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 1.707646176911544, |
|
"grad_norm": 0.05764350301977242, |
|
"learning_rate": 1.2750399292720283e-05, |
|
"loss": 0.0216, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 1.7091454272863569, |
|
"grad_norm": 0.05335928285880417, |
|
"learning_rate": 1.262277769645348e-05, |
|
"loss": 0.0375, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.7106446776611695, |
|
"grad_norm": 0.036542466613040704, |
|
"learning_rate": 1.2495754973884766e-05, |
|
"loss": 0.0217, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 1.712143928035982, |
|
"grad_norm": 0.0317543900475218, |
|
"learning_rate": 1.2369331995613665e-05, |
|
"loss": 0.0089, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 1.7136431784107946, |
|
"grad_norm": 0.043953143850443466, |
|
"learning_rate": 1.224350962812908e-05, |
|
"loss": 0.0221, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 1.7151424287856072, |
|
"grad_norm": 0.04475451692804134, |
|
"learning_rate": 1.2118288733803473e-05, |
|
"loss": 0.0269, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 1.7166416791604198, |
|
"grad_norm": 0.0468528264899152, |
|
"learning_rate": 1.1993670170886806e-05, |
|
"loss": 0.0251, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.7181409295352323, |
|
"grad_norm": 0.04567111932909807, |
|
"learning_rate": 1.1869654793500784e-05, |
|
"loss": 0.0256, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 1.7196401799100451, |
|
"grad_norm": 0.04665397165143054, |
|
"learning_rate": 1.17462434516329e-05, |
|
"loss": 0.0287, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 1.7211394302848575, |
|
"grad_norm": 0.043566933804619214, |
|
"learning_rate": 1.1623436991130654e-05, |
|
"loss": 0.0193, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 1.7226386806596703, |
|
"grad_norm": 0.03910118483915222, |
|
"learning_rate": 1.1501236253695823e-05, |
|
"loss": 0.019, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 1.7241379310344827, |
|
"grad_norm": 0.042811788583233146, |
|
"learning_rate": 1.1379642076878527e-05, |
|
"loss": 0.0192, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.7256371814092955, |
|
"grad_norm": 0.03259518496802405, |
|
"learning_rate": 1.1258655294071685e-05, |
|
"loss": 0.0114, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 1.7271364317841078, |
|
"grad_norm": 0.03610690907310535, |
|
"learning_rate": 1.1138276734505104e-05, |
|
"loss": 0.0088, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 1.7286356821589206, |
|
"grad_norm": 0.058981199182703795, |
|
"learning_rate": 1.1018507223240038e-05, |
|
"loss": 0.0303, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 1.7301349325337332, |
|
"grad_norm": 0.04288242904489575, |
|
"learning_rate": 1.0899347581163221e-05, |
|
"loss": 0.0316, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 1.7316341829085458, |
|
"grad_norm": 0.0322212652007745, |
|
"learning_rate": 1.0780798624981547e-05, |
|
"loss": 0.0127, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.7331334332833583, |
|
"grad_norm": 0.04036547160797265, |
|
"learning_rate": 1.0662861167216243e-05, |
|
"loss": 0.0268, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 1.734632683658171, |
|
"grad_norm": 0.027977601513989023, |
|
"learning_rate": 1.0545536016197499e-05, |
|
"loss": 0.0106, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 1.7361319340329835, |
|
"grad_norm": 0.04130410258519725, |
|
"learning_rate": 1.042882397605871e-05, |
|
"loss": 0.0164, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 1.737631184407796, |
|
"grad_norm": 0.09110404984904212, |
|
"learning_rate": 1.0312725846731175e-05, |
|
"loss": 0.032, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 1.7391304347826086, |
|
"grad_norm": 0.04675587064403581, |
|
"learning_rate": 1.0197242423938446e-05, |
|
"loss": 0.0112, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.7406296851574212, |
|
"grad_norm": 0.05310767217545907, |
|
"learning_rate": 1.008237449919096e-05, |
|
"loss": 0.0208, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 1.742128935532234, |
|
"grad_norm": 0.040653637020288395, |
|
"learning_rate": 9.968122859780648e-06, |
|
"loss": 0.0206, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 1.7436281859070464, |
|
"grad_norm": 0.04403126184179531, |
|
"learning_rate": 9.854488288775422e-06, |
|
"loss": 0.0151, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 1.7451274362818592, |
|
"grad_norm": 0.030076096749230896, |
|
"learning_rate": 9.74147156501396e-06, |
|
"loss": 0.0113, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 1.7466266866566715, |
|
"grad_norm": 0.029651097446938043, |
|
"learning_rate": 9.629073463100146e-06, |
|
"loss": 0.0082, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.7481259370314843, |
|
"grad_norm": 0.03931051532613931, |
|
"learning_rate": 9.517294753398064e-06, |
|
"loss": 0.0206, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 1.7496251874062967, |
|
"grad_norm": 0.04890773527042702, |
|
"learning_rate": 9.406136202026416e-06, |
|
"loss": 0.0186, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 1.7511244377811095, |
|
"grad_norm": 0.05681064457174664, |
|
"learning_rate": 9.295598570853514e-06, |
|
"loss": 0.0241, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 1.752623688155922, |
|
"grad_norm": 0.033925497614483074, |
|
"learning_rate": 9.185682617491863e-06, |
|
"loss": 0.0169, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 1.7541229385307346, |
|
"grad_norm": 0.050500607910895993, |
|
"learning_rate": 9.076389095293148e-06, |
|
"loss": 0.03, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.7556221889055472, |
|
"grad_norm": 0.03528769870947365, |
|
"learning_rate": 8.967718753342902e-06, |
|
"loss": 0.015, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 1.7571214392803598, |
|
"grad_norm": 0.03966138334448402, |
|
"learning_rate": 8.85967233645547e-06, |
|
"loss": 0.0151, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 1.7586206896551724, |
|
"grad_norm": 0.05472974585600949, |
|
"learning_rate": 8.752250585168931e-06, |
|
"loss": 0.0272, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 1.760119940029985, |
|
"grad_norm": 0.04658097856564392, |
|
"learning_rate": 8.645454235739903e-06, |
|
"loss": 0.0273, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 1.7616191904047978, |
|
"grad_norm": 0.04039569121015692, |
|
"learning_rate": 8.539284020138638e-06, |
|
"loss": 0.0137, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.76311844077961, |
|
"grad_norm": 0.04821730129601109, |
|
"learning_rate": 8.433740666043898e-06, |
|
"loss": 0.034, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 1.764617691154423, |
|
"grad_norm": 0.04511525491973174, |
|
"learning_rate": 8.328824896838006e-06, |
|
"loss": 0.0105, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 1.7661169415292353, |
|
"grad_norm": 0.04234182612968715, |
|
"learning_rate": 8.224537431601886e-06, |
|
"loss": 0.0159, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 1.767616191904048, |
|
"grad_norm": 0.0321447391016331, |
|
"learning_rate": 8.12087898511018e-06, |
|
"loss": 0.0133, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 1.7691154422788604, |
|
"grad_norm": 0.07341997220230499, |
|
"learning_rate": 8.017850267826232e-06, |
|
"loss": 0.0343, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.7706146926536732, |
|
"grad_norm": 0.05203785895813134, |
|
"learning_rate": 7.915451985897382e-06, |
|
"loss": 0.0321, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 1.7721139430284858, |
|
"grad_norm": 0.041084567895159414, |
|
"learning_rate": 7.81368484114996e-06, |
|
"loss": 0.0147, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 1.7736131934032984, |
|
"grad_norm": 0.031087393200696114, |
|
"learning_rate": 7.71254953108458e-06, |
|
"loss": 0.0095, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 1.775112443778111, |
|
"grad_norm": 0.058786196696954986, |
|
"learning_rate": 7.612046748871327e-06, |
|
"loss": 0.0343, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.7766116941529235, |
|
"grad_norm": 0.04113739414695195, |
|
"learning_rate": 7.512177183345026e-06, |
|
"loss": 0.0181, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.778110944527736, |
|
"grad_norm": 0.03883727700834806, |
|
"learning_rate": 7.412941519000527e-06, |
|
"loss": 0.0145, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 1.7796101949025487, |
|
"grad_norm": 0.041402364797904934, |
|
"learning_rate": 7.314340435987921e-06, |
|
"loss": 0.0147, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 1.7811094452773615, |
|
"grad_norm": 0.05299280675746249, |
|
"learning_rate": 7.216374610108012e-06, |
|
"loss": 0.0199, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 1.7826086956521738, |
|
"grad_norm": 0.02456671956395044, |
|
"learning_rate": 7.119044712807577e-06, |
|
"loss": 0.0107, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 1.7841079460269866, |
|
"grad_norm": 0.03171980923691591, |
|
"learning_rate": 7.022351411174866e-06, |
|
"loss": 0.0142, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.785607196401799, |
|
"grad_norm": 0.05325976441184379, |
|
"learning_rate": 6.92629536793491e-06, |
|
"loss": 0.0212, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 1.7871064467766118, |
|
"grad_norm": 0.04370623276607864, |
|
"learning_rate": 6.830877241445111e-06, |
|
"loss": 0.0246, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 1.7886056971514241, |
|
"grad_norm": 0.06432358936521287, |
|
"learning_rate": 6.736097685690601e-06, |
|
"loss": 0.0208, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 1.790104947526237, |
|
"grad_norm": 0.03404343307449647, |
|
"learning_rate": 6.6419573502798374e-06, |
|
"loss": 0.0125, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 1.7916041979010495, |
|
"grad_norm": 0.056376443801887534, |
|
"learning_rate": 6.548456880440135e-06, |
|
"loss": 0.0234, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.793103448275862, |
|
"grad_norm": 0.04235044723804527, |
|
"learning_rate": 6.455596917013273e-06, |
|
"loss": 0.0231, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 1.7946026986506747, |
|
"grad_norm": 0.03653428510070535, |
|
"learning_rate": 6.363378096451011e-06, |
|
"loss": 0.0245, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 1.7961019490254873, |
|
"grad_norm": 0.03721457535492326, |
|
"learning_rate": 6.2718010508108545e-06, |
|
"loss": 0.0133, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 1.7976011994002998, |
|
"grad_norm": 0.038261575822808695, |
|
"learning_rate": 6.180866407751595e-06, |
|
"loss": 0.0169, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 1.7991004497751124, |
|
"grad_norm": 0.044065953447000386, |
|
"learning_rate": 6.090574790529091e-06, |
|
"loss": 0.0167, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.800599700149925, |
|
"grad_norm": 0.04771815395917711, |
|
"learning_rate": 6.000926817991992e-06, |
|
"loss": 0.0181, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 1.8020989505247376, |
|
"grad_norm": 0.03612809486970171, |
|
"learning_rate": 5.911923104577455e-06, |
|
"loss": 0.0143, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 1.8035982008995504, |
|
"grad_norm": 0.06286516442714955, |
|
"learning_rate": 5.823564260306968e-06, |
|
"loss": 0.0287, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 1.8050974512743627, |
|
"grad_norm": 0.040689385196554954, |
|
"learning_rate": 5.735850890782157e-06, |
|
"loss": 0.0154, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 1.8065967016491755, |
|
"grad_norm": 0.05935355542909139, |
|
"learning_rate": 5.6487835971806515e-06, |
|
"loss": 0.0249, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.8080959520239879, |
|
"grad_norm": 0.026213533109652836, |
|
"learning_rate": 5.562362976251901e-06, |
|
"loss": 0.0102, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 1.8095952023988007, |
|
"grad_norm": 0.0235654511150688, |
|
"learning_rate": 5.476589620313188e-06, |
|
"loss": 0.0105, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 1.811094452773613, |
|
"grad_norm": 0.0635366912137273, |
|
"learning_rate": 5.39146411724547e-06, |
|
"loss": 0.018, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 1.8125937031484258, |
|
"grad_norm": 0.041487867509385566, |
|
"learning_rate": 5.306987050489442e-06, |
|
"loss": 0.0366, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 1.8140929535232384, |
|
"grad_norm": 0.04638926153042607, |
|
"learning_rate": 5.223158999041444e-06, |
|
"loss": 0.0252, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.815592203898051, |
|
"grad_norm": 0.060453405404883885, |
|
"learning_rate": 5.13998053744954e-06, |
|
"loss": 0.033, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 1.8170914542728636, |
|
"grad_norm": 0.026690496589016864, |
|
"learning_rate": 5.057452235809624e-06, |
|
"loss": 0.0136, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 1.8185907046476761, |
|
"grad_norm": 0.04363010283975346, |
|
"learning_rate": 4.975574659761406e-06, |
|
"loss": 0.0228, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 1.8200899550224887, |
|
"grad_norm": 0.046092550493469546, |
|
"learning_rate": 4.8943483704846475e-06, |
|
"loss": 0.0261, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 1.8215892053973013, |
|
"grad_norm": 0.05531628192606062, |
|
"learning_rate": 4.8137739246952195e-06, |
|
"loss": 0.0145, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.823088455772114, |
|
"grad_norm": 0.08123174441052526, |
|
"learning_rate": 4.733851874641382e-06, |
|
"loss": 0.0317, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 1.8245877061469264, |
|
"grad_norm": 0.04288348187685808, |
|
"learning_rate": 4.65458276809988e-06, |
|
"loss": 0.0203, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 1.8260869565217392, |
|
"grad_norm": 0.04334355640597478, |
|
"learning_rate": 4.575967148372317e-06, |
|
"loss": 0.0158, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 1.8275862068965516, |
|
"grad_norm": 0.037010861171650276, |
|
"learning_rate": 4.498005554281337e-06, |
|
"loss": 0.0173, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 1.8290854572713644, |
|
"grad_norm": 0.0389453296408419, |
|
"learning_rate": 4.420698520166988e-06, |
|
"loss": 0.0125, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.8305847076461768, |
|
"grad_norm": 0.04714273729670721, |
|
"learning_rate": 4.34404657588302e-06, |
|
"loss": 0.0214, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 1.8320839580209896, |
|
"grad_norm": 0.07183803253489271, |
|
"learning_rate": 4.268050246793276e-06, |
|
"loss": 0.0633, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 1.8335832083958021, |
|
"grad_norm": 0.03354509726743248, |
|
"learning_rate": 4.19271005376809e-06, |
|
"loss": 0.0209, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 1.8350824587706147, |
|
"grad_norm": 0.09728906022869646, |
|
"learning_rate": 4.118026513180695e-06, |
|
"loss": 0.0475, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 1.8365817091454273, |
|
"grad_norm": 0.03431271996396712, |
|
"learning_rate": 4.044000136903736e-06, |
|
"loss": 0.013, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.8380809595202399, |
|
"grad_norm": 0.041831243188060474, |
|
"learning_rate": 3.970631432305694e-06, |
|
"loss": 0.0266, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 1.8395802098950524, |
|
"grad_norm": 0.057870391772892306, |
|
"learning_rate": 3.897920902247465e-06, |
|
"loss": 0.0248, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 1.841079460269865, |
|
"grad_norm": 0.04691162453728428, |
|
"learning_rate": 3.825869045078867e-06, |
|
"loss": 0.0132, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 1.8425787106446778, |
|
"grad_norm": 0.03562764414364893, |
|
"learning_rate": 3.7544763546352834e-06, |
|
"loss": 0.0235, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 1.8440779610194902, |
|
"grad_norm": 0.06764557500741161, |
|
"learning_rate": 3.68374332023419e-06, |
|
"loss": 0.0416, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.845577211394303, |
|
"grad_norm": 0.042910585008751875, |
|
"learning_rate": 3.6136704266719113e-06, |
|
"loss": 0.0221, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 1.8470764617691153, |
|
"grad_norm": 0.038280889879914855, |
|
"learning_rate": 3.5442581542201923e-06, |
|
"loss": 0.0162, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 1.8485757121439281, |
|
"grad_norm": 0.05556984768768352, |
|
"learning_rate": 3.475506978623e-06, |
|
"loss": 0.023, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 1.8500749625187405, |
|
"grad_norm": 0.0503673855715429, |
|
"learning_rate": 3.40741737109318e-06, |
|
"loss": 0.016, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 1.8515742128935533, |
|
"grad_norm": 0.03274390981831604, |
|
"learning_rate": 3.339989798309273e-06, |
|
"loss": 0.0115, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.8530734632683659, |
|
"grad_norm": 0.036087042920603704, |
|
"learning_rate": 3.273224722412327e-06, |
|
"loss": 0.0136, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 1.8545727136431784, |
|
"grad_norm": 0.06008954234162343, |
|
"learning_rate": 3.2071226010026765e-06, |
|
"loss": 0.0199, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 1.856071964017991, |
|
"grad_norm": 0.04454845739153237, |
|
"learning_rate": 3.1416838871368924e-06, |
|
"loss": 0.0258, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 1.8575712143928036, |
|
"grad_norm": 0.04443343456113566, |
|
"learning_rate": 3.0769090293245705e-06, |
|
"loss": 0.0199, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 1.8590704647676162, |
|
"grad_norm": 0.06673692405990893, |
|
"learning_rate": 3.012798471525324e-06, |
|
"loss": 0.0633, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.8605697151424287, |
|
"grad_norm": 0.049212673297310765, |
|
"learning_rate": 2.949352653145754e-06, |
|
"loss": 0.0262, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 1.8620689655172413, |
|
"grad_norm": 0.06719806910277995, |
|
"learning_rate": 2.8865720090364034e-06, |
|
"loss": 0.0232, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 1.863568215892054, |
|
"grad_norm": 0.05588717349611258, |
|
"learning_rate": 2.8244569694887426e-06, |
|
"loss": 0.0136, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 1.8650674662668667, |
|
"grad_norm": 0.0404242627373259, |
|
"learning_rate": 2.7630079602323442e-06, |
|
"loss": 0.012, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 1.866566716641679, |
|
"grad_norm": 0.03967545521601158, |
|
"learning_rate": 2.7022254024318015e-06, |
|
"loss": 0.0175, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.8680659670164919, |
|
"grad_norm": 0.026424239288222608, |
|
"learning_rate": 2.6421097126839712e-06, |
|
"loss": 0.0143, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 1.8695652173913042, |
|
"grad_norm": 0.02958120459046709, |
|
"learning_rate": 2.582661303015066e-06, |
|
"loss": 0.0089, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 1.871064467766117, |
|
"grad_norm": 0.029911673211796365, |
|
"learning_rate": 2.5238805808778242e-06, |
|
"loss": 0.0098, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 1.8725637181409296, |
|
"grad_norm": 0.056981287060817974, |
|
"learning_rate": 2.465767949148734e-06, |
|
"loss": 0.0227, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 1.8740629685157422, |
|
"grad_norm": 0.0376261021688721, |
|
"learning_rate": 2.4083238061252567e-06, |
|
"loss": 0.0174, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.8755622188905547, |
|
"grad_norm": 0.034644589104120896, |
|
"learning_rate": 2.351548545523141e-06, |
|
"loss": 0.0131, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 1.8770614692653673, |
|
"grad_norm": 0.039399746567982966, |
|
"learning_rate": 2.295442556473637e-06, |
|
"loss": 0.0162, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 1.87856071964018, |
|
"grad_norm": 0.03207964462958592, |
|
"learning_rate": 2.240006223520941e-06, |
|
"loss": 0.0078, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 1.8800599700149925, |
|
"grad_norm": 0.05260614234208255, |
|
"learning_rate": 2.1852399266194314e-06, |
|
"loss": 0.0185, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 1.881559220389805, |
|
"grad_norm": 0.05106484042867738, |
|
"learning_rate": 2.1311440411312166e-06, |
|
"loss": 0.0226, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.8830584707646176, |
|
"grad_norm": 0.04263872143801973, |
|
"learning_rate": 2.0777189378234143e-06, |
|
"loss": 0.0117, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 1.8845577211394304, |
|
"grad_norm": 0.07846972233061164, |
|
"learning_rate": 2.024964982865729e-06, |
|
"loss": 0.0339, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 1.8860569715142428, |
|
"grad_norm": 0.029117608897231563, |
|
"learning_rate": 1.9728825378278246e-06, |
|
"loss": 0.0071, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 1.8875562218890556, |
|
"grad_norm": 0.0388167533672085, |
|
"learning_rate": 1.921471959676957e-06, |
|
"loss": 0.0123, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 1.889055472263868, |
|
"grad_norm": 0.04539482718528663, |
|
"learning_rate": 1.8707336007754873e-06, |
|
"loss": 0.0179, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.8905547226386807, |
|
"grad_norm": 0.045445265904142454, |
|
"learning_rate": 1.820667808878429e-06, |
|
"loss": 0.0219, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 1.892053973013493, |
|
"grad_norm": 0.028894890651339714, |
|
"learning_rate": 1.771274927131139e-06, |
|
"loss": 0.0145, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 1.893553223388306, |
|
"grad_norm": 0.042388630611865694, |
|
"learning_rate": 1.7225552940668632e-06, |
|
"loss": 0.0157, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 1.8950524737631185, |
|
"grad_norm": 0.06876839316091303, |
|
"learning_rate": 1.6745092436045494e-06, |
|
"loss": 0.0351, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 1.896551724137931, |
|
"grad_norm": 0.03573171364688593, |
|
"learning_rate": 1.6271371050464056e-06, |
|
"loss": 0.0237, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.8980509745127436, |
|
"grad_norm": 0.038482317398168285, |
|
"learning_rate": 1.580439203075812e-06, |
|
"loss": 0.0124, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 1.8995502248875562, |
|
"grad_norm": 0.05737954330837195, |
|
"learning_rate": 1.5344158577549229e-06, |
|
"loss": 0.0364, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 1.9010494752623688, |
|
"grad_norm": 0.04099948506323669, |
|
"learning_rate": 1.4890673845226133e-06, |
|
"loss": 0.0197, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 1.9025487256371814, |
|
"grad_norm": 0.03313456374887663, |
|
"learning_rate": 1.444394094192225e-06, |
|
"loss": 0.02, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 1.9040479760119942, |
|
"grad_norm": 0.043427692868520065, |
|
"learning_rate": 1.400396292949513e-06, |
|
"loss": 0.0202, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.9055472263868065, |
|
"grad_norm": 0.07327623247350938, |
|
"learning_rate": 1.3570742823504567e-06, |
|
"loss": 0.0245, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 1.9070464767616193, |
|
"grad_norm": 0.05525179025128676, |
|
"learning_rate": 1.3144283593192752e-06, |
|
"loss": 0.0246, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 1.9085457271364317, |
|
"grad_norm": 0.06295368734028475, |
|
"learning_rate": 1.272458816146349e-06, |
|
"loss": 0.0246, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 1.9100449775112445, |
|
"grad_norm": 0.034583926367543656, |
|
"learning_rate": 1.231165940486234e-06, |
|
"loss": 0.0155, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 1.9115442278860568, |
|
"grad_norm": 0.0437209002683007, |
|
"learning_rate": 1.1905500153556625e-06, |
|
"loss": 0.0162, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.9130434782608696, |
|
"grad_norm": 0.03904804270714857, |
|
"learning_rate": 1.1506113191316447e-06, |
|
"loss": 0.0154, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 1.9145427286356822, |
|
"grad_norm": 0.05420766292701498, |
|
"learning_rate": 1.1113501255495485e-06, |
|
"loss": 0.0195, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 1.9160419790104948, |
|
"grad_norm": 0.04566901515231431, |
|
"learning_rate": 1.0727667037011668e-06, |
|
"loss": 0.0125, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 1.9175412293853074, |
|
"grad_norm": 0.07198157977225833, |
|
"learning_rate": 1.0348613180329757e-06, |
|
"loss": 0.0219, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 1.91904047976012, |
|
"grad_norm": 0.032622544247452506, |
|
"learning_rate": 9.976342283442463e-07, |
|
"loss": 0.0085, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.9205397301349325, |
|
"grad_norm": 0.03912032612952811, |
|
"learning_rate": 9.610856897852682e-07, |
|
"loss": 0.0097, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 1.922038980509745, |
|
"grad_norm": 0.05117531076928059, |
|
"learning_rate": 9.252159528556403e-07, |
|
"loss": 0.0219, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 1.9235382308845579, |
|
"grad_norm": 0.048504658124269, |
|
"learning_rate": 8.900252634025274e-07, |
|
"loss": 0.0203, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 1.9250374812593702, |
|
"grad_norm": 0.06377251552840921, |
|
"learning_rate": 8.555138626189618e-07, |
|
"loss": 0.0235, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 1.926536731634183, |
|
"grad_norm": 0.0353549864848296, |
|
"learning_rate": 8.216819870422332e-07, |
|
"loss": 0.0141, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.9280359820089954, |
|
"grad_norm": 0.039798781616309425, |
|
"learning_rate": 7.885298685522235e-07, |
|
"loss": 0.0147, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 1.9295352323838082, |
|
"grad_norm": 0.03541629405787621, |
|
"learning_rate": 7.560577343698305e-07, |
|
"loss": 0.0102, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 1.9310344827586206, |
|
"grad_norm": 0.05561642747147643, |
|
"learning_rate": 7.242658070554464e-07, |
|
"loss": 0.0206, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 1.9325337331334334, |
|
"grad_norm": 0.03534178862057278, |
|
"learning_rate": 6.931543045073708e-07, |
|
"loss": 0.0095, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 1.934032983508246, |
|
"grad_norm": 0.04209596571556163, |
|
"learning_rate": 6.627234399603555e-07, |
|
"loss": 0.0245, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.9355322338830585, |
|
"grad_norm": 0.06513898157808579, |
|
"learning_rate": 6.32973421984151e-07, |
|
"loss": 0.026, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 1.937031484257871, |
|
"grad_norm": 0.04921099872743036, |
|
"learning_rate": 6.039044544820404e-07, |
|
"loss": 0.0194, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 1.9385307346326837, |
|
"grad_norm": 0.031094335152381827, |
|
"learning_rate": 5.755167366894632e-07, |
|
"loss": 0.0121, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 1.9400299850074962, |
|
"grad_norm": 0.050223040034524, |
|
"learning_rate": 5.478104631726711e-07, |
|
"loss": 0.0173, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 1.9415292353823088, |
|
"grad_norm": 0.03520172367952888, |
|
"learning_rate": 5.207858238273522e-07, |
|
"loss": 0.019, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.9430284857571214, |
|
"grad_norm": 0.054375401258943634, |
|
"learning_rate": 4.944430038773762e-07, |
|
"loss": 0.0335, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 1.944527736131934, |
|
"grad_norm": 0.04736400384851895, |
|
"learning_rate": 4.6878218387346183e-07, |
|
"loss": 0.0245, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 1.9460269865067468, |
|
"grad_norm": 0.05631107325781845, |
|
"learning_rate": 4.438035396920004e-07, |
|
"loss": 0.0173, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 1.9475262368815591, |
|
"grad_norm": 0.02312088515804648, |
|
"learning_rate": 4.1950724253383423e-07, |
|
"loss": 0.0098, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 1.949025487256372, |
|
"grad_norm": 0.06415814349625676, |
|
"learning_rate": 3.958934589230467e-07, |
|
"loss": 0.0137, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.9505247376311843, |
|
"grad_norm": 0.042944785560430666, |
|
"learning_rate": 3.7296235070587435e-07, |
|
"loss": 0.0129, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 1.952023988005997, |
|
"grad_norm": 0.03691120345897048, |
|
"learning_rate": 3.50714075049563e-07, |
|
"loss": 0.0165, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 1.9535232383808094, |
|
"grad_norm": 0.047643378112865786, |
|
"learning_rate": 3.2914878444131326e-07, |
|
"loss": 0.0191, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 1.9550224887556222, |
|
"grad_norm": 0.04866045710270131, |
|
"learning_rate": 3.0826662668720364e-07, |
|
"loss": 0.0211, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 1.9565217391304348, |
|
"grad_norm": 0.035743634358242155, |
|
"learning_rate": 2.8806774491120234e-07, |
|
"loss": 0.0188, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.9580209895052474, |
|
"grad_norm": 0.0499970530068033, |
|
"learning_rate": 2.685522775541904e-07, |
|
"loss": 0.0275, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 1.95952023988006, |
|
"grad_norm": 0.043717836582837635, |
|
"learning_rate": 2.497203583729957e-07, |
|
"loss": 0.0223, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 1.9610194902548725, |
|
"grad_norm": 0.06115844054156324, |
|
"learning_rate": 2.315721164394713e-07, |
|
"loss": 0.0259, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 1.9625187406296851, |
|
"grad_norm": 0.04674640262799417, |
|
"learning_rate": 2.141076761396521e-07, |
|
"loss": 0.0296, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 1.9640179910044977, |
|
"grad_norm": 0.04102373844741974, |
|
"learning_rate": 1.973271571728441e-07, |
|
"loss": 0.0215, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.9655172413793105, |
|
"grad_norm": 0.05296227342955121, |
|
"learning_rate": 1.8123067455084741e-07, |
|
"loss": 0.0193, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 1.9670164917541229, |
|
"grad_norm": 0.04607714250992321, |
|
"learning_rate": 1.6581833859716788e-07, |
|
"loss": 0.0233, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 1.9685157421289357, |
|
"grad_norm": 0.05305402469014974, |
|
"learning_rate": 1.5109025494620677e-07, |
|
"loss": 0.0186, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 1.970014992503748, |
|
"grad_norm": 0.027178810058074054, |
|
"learning_rate": 1.3704652454261668e-07, |
|
"loss": 0.008, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 1.9715142428785608, |
|
"grad_norm": 0.037694527414950885, |
|
"learning_rate": 1.2368724364053563e-07, |
|
"loss": 0.0095, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.9730134932533732, |
|
"grad_norm": 0.05169984692035686, |
|
"learning_rate": 1.1101250380300965e-07, |
|
"loss": 0.022, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 1.974512743628186, |
|
"grad_norm": 0.05203649072494641, |
|
"learning_rate": 9.902239190124896e-08, |
|
"loss": 0.0205, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 1.9760119940029985, |
|
"grad_norm": 0.05317970284385352, |
|
"learning_rate": 8.771699011416168e-08, |
|
"loss": 0.0305, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 1.9775112443778111, |
|
"grad_norm": 0.046052084092380735, |
|
"learning_rate": 7.709637592770991e-08, |
|
"loss": 0.02, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 1.9790104947526237, |
|
"grad_norm": 0.020664658138814455, |
|
"learning_rate": 6.71606221343768e-08, |
|
"loss": 0.0056, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.9805097451274363, |
|
"grad_norm": 0.04220144244776692, |
|
"learning_rate": 5.790979683271136e-08, |
|
"loss": 0.0151, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 1.9820089955022488, |
|
"grad_norm": 0.04084288034944265, |
|
"learning_rate": 4.934396342684e-08, |
|
"loss": 0.0142, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 1.9835082458770614, |
|
"grad_norm": 0.05769798222724036, |
|
"learning_rate": 4.146318062603349e-08, |
|
"loss": 0.0226, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 1.9850074962518742, |
|
"grad_norm": 0.1100081838811883, |
|
"learning_rate": 3.4267502444274015e-08, |
|
"loss": 0.0365, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 1.9865067466266866, |
|
"grad_norm": 0.08427099553965144, |
|
"learning_rate": 2.7756978199944273e-08, |
|
"loss": 0.0217, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.9880059970014994, |
|
"grad_norm": 0.040887716983706905, |
|
"learning_rate": 2.193165251545004e-08, |
|
"loss": 0.0154, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 1.9895052473763117, |
|
"grad_norm": 0.057352690068278346, |
|
"learning_rate": 1.6791565316920388e-08, |
|
"loss": 0.0193, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 1.9910044977511245, |
|
"grad_norm": 0.03068600118328415, |
|
"learning_rate": 1.2336751833941229e-08, |
|
"loss": 0.0175, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 1.992503748125937, |
|
"grad_norm": 0.04984601232484485, |
|
"learning_rate": 8.567242599299973e-09, |
|
"loss": 0.0183, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 1.9940029985007497, |
|
"grad_norm": 0.04524382063881194, |
|
"learning_rate": 5.483063448785686e-09, |
|
"loss": 0.0199, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.9955022488755623, |
|
"grad_norm": 0.047304184345328626, |
|
"learning_rate": 3.0842355210336515e-09, |
|
"loss": 0.0228, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 1.9970014992503748, |
|
"grad_norm": 0.057086267630760426, |
|
"learning_rate": 1.3707752573255405e-09, |
|
"loss": 0.0232, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 1.9985007496251874, |
|
"grad_norm": 0.037788074538081265, |
|
"learning_rate": 3.426944015227918e-10, |
|
"loss": 0.0204, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.060579723830781834, |
|
"learning_rate": 0.0, |
|
"loss": 0.0344, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 0.05502813309431076, |
|
"eval_runtime": 759.9841, |
|
"eval_samples_per_second": 13.653, |
|
"eval_steps_per_second": 0.854, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 1334, |
|
"total_flos": 2659138639233024.0, |
|
"train_loss": 0.03610908869839646, |
|
"train_runtime": 12784.2292, |
|
"train_samples_per_second": 3.339, |
|
"train_steps_per_second": 0.104 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1334, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2659138639233024.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|