{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9995855781185247,
  "eval_steps": 500,
  "global_step": 1206,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0008288437629506838,
      "grad_norm": 23.821738606267694,
      "learning_rate": 8.264462809917357e-08,
      "loss": 1.4077,
      "step": 1
    },
    {
      "epoch": 0.004144218814753419,
      "grad_norm": 22.834257268242467,
      "learning_rate": 4.132231404958678e-07,
      "loss": 1.4048,
      "step": 5
    },
    {
      "epoch": 0.008288437629506838,
      "grad_norm": 8.208014337309038,
      "learning_rate": 8.264462809917356e-07,
      "loss": 1.3206,
      "step": 10
    },
    {
      "epoch": 0.012432656444260257,
      "grad_norm": 10.88562979999232,
      "learning_rate": 1.2396694214876035e-06,
      "loss": 1.1572,
      "step": 15
    },
    {
      "epoch": 0.016576875259013676,
      "grad_norm": 3.02410216068558,
      "learning_rate": 1.6528925619834712e-06,
      "loss": 1.0108,
      "step": 20
    },
    {
      "epoch": 0.020721094073767096,
      "grad_norm": 2.9630819683565868,
      "learning_rate": 2.066115702479339e-06,
      "loss": 0.9599,
      "step": 25
    },
    {
      "epoch": 0.024865312888520515,
      "grad_norm": 2.450927822041959,
      "learning_rate": 2.479338842975207e-06,
      "loss": 0.9202,
      "step": 30
    },
    {
      "epoch": 0.029009531703273934,
      "grad_norm": 2.241659288093002,
      "learning_rate": 2.8925619834710743e-06,
      "loss": 0.9075,
      "step": 35
    },
    {
      "epoch": 0.03315375051802735,
      "grad_norm": 2.2795745210790708,
      "learning_rate": 3.3057851239669424e-06,
      "loss": 0.8904,
      "step": 40
    },
    {
      "epoch": 0.03729796933278077,
      "grad_norm": 2.2916121960785274,
      "learning_rate": 3.71900826446281e-06,
      "loss": 0.8778,
      "step": 45
    },
    {
      "epoch": 0.04144218814753419,
      "grad_norm": 3.0108486369173444,
      "learning_rate": 4.132231404958678e-06,
      "loss": 0.8811,
      "step": 50
    },
    {
      "epoch": 0.04558640696228761,
      "grad_norm": 2.34463490002114,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.8644,
      "step": 55
    },
    {
      "epoch": 0.04973062577704103,
      "grad_norm": 2.2048136988502836,
      "learning_rate": 4.958677685950414e-06,
      "loss": 0.8658,
      "step": 60
    },
    {
      "epoch": 0.053874844591794445,
      "grad_norm": 2.360407044698775,
      "learning_rate": 5.371900826446281e-06,
      "loss": 0.8483,
      "step": 65
    },
    {
      "epoch": 0.05801906340654787,
      "grad_norm": 2.3750627177827437,
      "learning_rate": 5.785123966942149e-06,
      "loss": 0.829,
      "step": 70
    },
    {
      "epoch": 0.06216328222130128,
      "grad_norm": 2.5351719045640424,
      "learning_rate": 6.198347107438017e-06,
      "loss": 0.8457,
      "step": 75
    },
    {
      "epoch": 0.0663075010360547,
      "grad_norm": 2.4658487406561163,
      "learning_rate": 6.611570247933885e-06,
      "loss": 0.8295,
      "step": 80
    },
    {
      "epoch": 0.07045171985080813,
      "grad_norm": 2.552123954872006,
      "learning_rate": 7.0247933884297525e-06,
      "loss": 0.8248,
      "step": 85
    },
    {
      "epoch": 0.07459593866556154,
      "grad_norm": 2.5523240708243984,
      "learning_rate": 7.43801652892562e-06,
      "loss": 0.8073,
      "step": 90
    },
    {
      "epoch": 0.07874015748031496,
      "grad_norm": 2.6557558002422343,
      "learning_rate": 7.851239669421489e-06,
      "loss": 0.8227,
      "step": 95
    },
    {
      "epoch": 0.08288437629506838,
      "grad_norm": 2.5132033700665164,
      "learning_rate": 8.264462809917356e-06,
      "loss": 0.803,
      "step": 100
    },
    {
      "epoch": 0.0870285951098218,
      "grad_norm": 2.720513367073119,
      "learning_rate": 8.677685950413224e-06,
      "loss": 0.8023,
      "step": 105
    },
    {
      "epoch": 0.09117281392457521,
      "grad_norm": 2.9809398097562543,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.7967,
      "step": 110
    },
    {
      "epoch": 0.09531703273932864,
      "grad_norm": 2.6852876988337564,
      "learning_rate": 9.50413223140496e-06,
      "loss": 0.8083,
      "step": 115
    },
    {
      "epoch": 0.09946125155408206,
      "grad_norm": 2.4634580540100686,
      "learning_rate": 9.917355371900828e-06,
      "loss": 0.7892,
      "step": 120
    },
    {
      "epoch": 0.10360547036883548,
      "grad_norm": 2.583731945309341,
      "learning_rate": 9.999664652243188e-06,
      "loss": 0.8053,
      "step": 125
    },
    {
      "epoch": 0.10774968918358889,
      "grad_norm": 2.6768428928051873,
      "learning_rate": 9.99830237907608e-06,
      "loss": 0.7899,
      "step": 130
    },
    {
      "epoch": 0.11189390799834231,
      "grad_norm": 2.550586632842143,
      "learning_rate": 9.995892506564461e-06,
      "loss": 0.7825,
      "step": 135
    },
    {
      "epoch": 0.11603812681309573,
      "grad_norm": 2.50256712970827,
      "learning_rate": 9.992435539796e-06,
      "loss": 0.7693,
      "step": 140
    },
    {
      "epoch": 0.12018234562784914,
      "grad_norm": 2.542375468259113,
      "learning_rate": 9.987932203319917e-06,
      "loss": 0.7716,
      "step": 145
    },
    {
      "epoch": 0.12432656444260257,
      "grad_norm": 2.354650001491699,
      "learning_rate": 9.982383440995146e-06,
      "loss": 0.7789,
      "step": 150
    },
    {
      "epoch": 0.12847078325735597,
      "grad_norm": 2.680500650181213,
      "learning_rate": 9.975790415792497e-06,
      "loss": 0.772,
      "step": 155
    },
    {
      "epoch": 0.1326150020721094,
      "grad_norm": 2.8324037133090827,
      "learning_rate": 9.968154509550914e-06,
      "loss": 0.7574,
      "step": 160
    },
    {
      "epoch": 0.13675922088686282,
      "grad_norm": 3.314558520418501,
      "learning_rate": 9.959477322687852e-06,
      "loss": 0.7434,
      "step": 165
    },
    {
      "epoch": 0.14090343970161626,
      "grad_norm": 2.558536518840115,
      "learning_rate": 9.949760673863846e-06,
      "loss": 0.7454,
      "step": 170
    },
    {
      "epoch": 0.14504765851636967,
      "grad_norm": 2.6087877711239247,
      "learning_rate": 9.93900659960133e-06,
      "loss": 0.7488,
      "step": 175
    },
    {
      "epoch": 0.14919187733112307,
      "grad_norm": 2.4922551536546416,
      "learning_rate": 9.927217353857809e-06,
      "loss": 0.7453,
      "step": 180
    },
    {
      "epoch": 0.1533360961458765,
      "grad_norm": 2.4983517636117782,
      "learning_rate": 9.914395407553444e-06,
      "loss": 0.7441,
      "step": 185
    },
    {
      "epoch": 0.15748031496062992,
      "grad_norm": 2.493536518057786,
      "learning_rate": 9.900543448053164e-06,
      "loss": 0.7435,
      "step": 190
    },
    {
      "epoch": 0.16162453377538333,
      "grad_norm": 2.361368540133534,
      "learning_rate": 9.885664378603432e-06,
      "loss": 0.7216,
      "step": 195
    },
    {
      "epoch": 0.16576875259013676,
      "grad_norm": 2.2001360959731486,
      "learning_rate": 9.869761317723744e-06,
      "loss": 0.728,
      "step": 200
    },
    {
      "epoch": 0.16991297140489017,
      "grad_norm": 2.305194442427537,
      "learning_rate": 9.85283759855301e-06,
      "loss": 0.7263,
      "step": 205
    },
    {
      "epoch": 0.1740571902196436,
      "grad_norm": 2.208332719886559,
      "learning_rate": 9.834896768150963e-06,
      "loss": 0.7139,
      "step": 210
    },
    {
      "epoch": 0.17820140903439702,
      "grad_norm": 2.3264002622179887,
      "learning_rate": 9.81594258675473e-06,
      "loss": 0.7223,
      "step": 215
    },
    {
      "epoch": 0.18234562784915043,
      "grad_norm": 2.3609080234642983,
      "learning_rate": 9.795979026990717e-06,
      "loss": 0.7031,
      "step": 220
    },
    {
      "epoch": 0.18648984666390386,
      "grad_norm": 2.3331179014344055,
      "learning_rate": 9.775010273041975e-06,
      "loss": 0.715,
      "step": 225
    },
    {
      "epoch": 0.19063406547865727,
      "grad_norm": 2.4595571627938724,
      "learning_rate": 9.753040719771249e-06,
      "loss": 0.7084,
      "step": 230
    },
    {
      "epoch": 0.19477828429341068,
      "grad_norm": 2.5273928971647823,
      "learning_rate": 9.730074971799837e-06,
      "loss": 0.6928,
      "step": 235
    },
    {
      "epoch": 0.19892250310816412,
      "grad_norm": 2.4481147916191324,
      "learning_rate": 9.706117842542517e-06,
      "loss": 0.684,
      "step": 240
    },
    {
      "epoch": 0.20306672192291753,
      "grad_norm": 2.396714983793178,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.6942,
      "step": 245
    },
    {
      "epoch": 0.20721094073767096,
      "grad_norm": 2.23212792235539,
      "learning_rate": 9.655249731699973e-06,
      "loss": 0.6846,
      "step": 250
    },
    {
      "epoch": 0.21135515955242437,
      "grad_norm": 2.33058592821463,
      "learning_rate": 9.628349411614503e-06,
      "loss": 0.6819,
      "step": 255
    },
    {
      "epoch": 0.21549937836717778,
      "grad_norm": 2.3204394270788966,
      "learning_rate": 9.600479031008072e-06,
      "loss": 0.6773,
      "step": 260
    },
    {
      "epoch": 0.21964359718193122,
      "grad_norm": 2.2382725360662308,
      "learning_rate": 9.571644431262463e-06,
      "loss": 0.6649,
      "step": 265
    },
    {
      "epoch": 0.22378781599668462,
      "grad_norm": 2.2039133108908184,
      "learning_rate": 9.54185165585114e-06,
      "loss": 0.6537,
      "step": 270
    },
    {
      "epoch": 0.22793203481143803,
      "grad_norm": 2.4253359596794337,
      "learning_rate": 9.511106949072588e-06,
      "loss": 0.6621,
      "step": 275
    },
    {
      "epoch": 0.23207625362619147,
      "grad_norm": 2.4527370719290915,
      "learning_rate": 9.479416754741577e-06,
      "loss": 0.6694,
      "step": 280
    },
    {
      "epoch": 0.23622047244094488,
      "grad_norm": 2.3817761190945514,
      "learning_rate": 9.446787714838579e-06,
      "loss": 0.6594,
      "step": 285
    },
    {
      "epoch": 0.2403646912556983,
      "grad_norm": 2.2516467162015594,
      "learning_rate": 9.413226668117679e-06,
      "loss": 0.6535,
      "step": 290
    },
    {
      "epoch": 0.24450891007045172,
      "grad_norm": 2.516472366579631,
      "learning_rate": 9.37874064867323e-06,
      "loss": 0.6558,
      "step": 295
    },
    {
      "epoch": 0.24865312888520513,
      "grad_norm": 2.4095497698840336,
      "learning_rate": 9.343336884465577e-06,
      "loss": 0.6456,
      "step": 300
    },
    {
      "epoch": 0.25279734769995854,
      "grad_norm": 2.4199182947381024,
      "learning_rate": 9.307022795806125e-06,
      "loss": 0.6406,
      "step": 305
    },
    {
      "epoch": 0.25694156651471195,
      "grad_norm": 2.282073481167692,
      "learning_rate": 9.26980599380213e-06,
      "loss": 0.6339,
      "step": 310
    },
    {
      "epoch": 0.2610857853294654,
      "grad_norm": 2.441801633470137,
      "learning_rate": 9.231694278761455e-06,
      "loss": 0.6326,
      "step": 315
    },
    {
      "epoch": 0.2652300041442188,
      "grad_norm": 12.39175458554138,
      "learning_rate": 9.192695638557723e-06,
      "loss": 0.6356,
      "step": 320
    },
    {
      "epoch": 0.26937422295897223,
      "grad_norm": 2.5618372097096525,
      "learning_rate": 9.1528182469561e-06,
      "loss": 0.6252,
      "step": 325
    },
    {
      "epoch": 0.27351844177372564,
      "grad_norm": 2.4608597563280967,
      "learning_rate": 9.112070461900178e-06,
      "loss": 0.6119,
      "step": 330
    },
    {
      "epoch": 0.27766266058847905,
      "grad_norm": 2.312033920022348,
      "learning_rate": 9.070460823760197e-06,
      "loss": 0.6216,
      "step": 335
    },
    {
      "epoch": 0.2818068794032325,
      "grad_norm": 2.2955010300704344,
      "learning_rate": 9.027998053543079e-06,
      "loss": 0.6216,
      "step": 340
    },
    {
      "epoch": 0.2859510982179859,
      "grad_norm": 2.2889198622068783,
      "learning_rate": 8.984691051064576e-06,
      "loss": 0.6104,
      "step": 345
    },
    {
      "epoch": 0.29009531703273933,
      "grad_norm": 2.2147528522580844,
      "learning_rate": 8.94054889308395e-06,
      "loss": 0.6096,
      "step": 350
    },
    {
      "epoch": 0.29423953584749274,
      "grad_norm": 2.3586059493212814,
      "learning_rate": 8.895580831401563e-06,
      "loss": 0.6126,
      "step": 355
    },
    {
      "epoch": 0.29838375466224615,
      "grad_norm": 2.7570148244223924,
      "learning_rate": 8.849796290919787e-06,
      "loss": 0.6102,
      "step": 360
    },
    {
      "epoch": 0.3025279734769996,
      "grad_norm": 2.2730196820740667,
      "learning_rate": 8.803204867667624e-06,
      "loss": 0.6037,
      "step": 365
    },
    {
      "epoch": 0.306672192291753,
      "grad_norm": 2.27164079566725,
      "learning_rate": 8.755816326789469e-06,
      "loss": 0.601,
      "step": 370
    },
    {
      "epoch": 0.31081641110650643,
      "grad_norm": 2.2763704686366557,
      "learning_rate": 8.70764060049842e-06,
      "loss": 0.5829,
      "step": 375
    },
    {
      "epoch": 0.31496062992125984,
      "grad_norm": 2.265603504905875,
      "learning_rate": 8.658687785994579e-06,
      "loss": 0.5957,
      "step": 380
    },
    {
      "epoch": 0.31910484873601325,
      "grad_norm": 2.4701408773246203,
      "learning_rate": 8.608968143348765e-06,
      "loss": 0.5846,
      "step": 385
    },
    {
      "epoch": 0.32324906755076666,
      "grad_norm": 2.2590604345543897,
      "learning_rate": 8.558492093352098e-06,
      "loss": 0.5948,
      "step": 390
    },
    {
      "epoch": 0.3273932863655201,
      "grad_norm": 2.112110217235179,
      "learning_rate": 8.50727021533189e-06,
      "loss": 0.5897,
      "step": 395
    },
    {
      "epoch": 0.33153750518027353,
      "grad_norm": 2.2220762245644297,
      "learning_rate": 8.455313244934324e-06,
      "loss": 0.5686,
      "step": 400
    },
    {
      "epoch": 0.33568172399502694,
      "grad_norm": 2.4588674796101087,
      "learning_rate": 8.402632071874348e-06,
      "loss": 0.5791,
      "step": 405
    },
    {
      "epoch": 0.33982594280978035,
      "grad_norm": 2.120301054281535,
      "learning_rate": 8.349237737653288e-06,
      "loss": 0.5761,
      "step": 410
    },
    {
      "epoch": 0.34397016162453375,
      "grad_norm": 2.27000349226283,
      "learning_rate": 8.29514143324466e-06,
      "loss": 0.5664,
      "step": 415
    },
    {
      "epoch": 0.3481143804392872,
      "grad_norm": 2.209196060426881,
      "learning_rate": 8.24035449674863e-06,
      "loss": 0.5598,
      "step": 420
    },
    {
      "epoch": 0.3522585992540406,
      "grad_norm": 2.765263815929266,
      "learning_rate": 8.184888411015655e-06,
      "loss": 0.5571,
      "step": 425
    },
    {
      "epoch": 0.35640281806879404,
      "grad_norm": 2.3926237887826405,
      "learning_rate": 8.128754801239781e-06,
      "loss": 0.556,
      "step": 430
    },
    {
      "epoch": 0.36054703688354744,
      "grad_norm": 2.4696018462025506,
      "learning_rate": 8.071965432522107e-06,
      "loss": 0.5614,
      "step": 435
    },
    {
      "epoch": 0.36469125569830085,
      "grad_norm": 2.2772583844160614,
      "learning_rate": 8.01453220740492e-06,
      "loss": 0.5521,
      "step": 440
    },
    {
      "epoch": 0.36883547451305426,
      "grad_norm": 2.374847612226747,
      "learning_rate": 7.956467163377037e-06,
      "loss": 0.5495,
      "step": 445
    },
    {
      "epoch": 0.3729796933278077,
      "grad_norm": 2.2043358974290386,
      "learning_rate": 7.89778247035085e-06,
      "loss": 0.5427,
      "step": 450
    },
    {
      "epoch": 0.37712391214256114,
      "grad_norm": 2.331068862386996,
      "learning_rate": 7.838490428111625e-06,
      "loss": 0.5449,
      "step": 455
    },
    {
      "epoch": 0.38126813095731454,
      "grad_norm": 2.2139576546877713,
      "learning_rate": 7.77860346373957e-06,
      "loss": 0.5561,
      "step": 460
    },
    {
      "epoch": 0.38541234977206795,
      "grad_norm": 2.1991958487671655,
      "learning_rate": 7.718134129005238e-06,
      "loss": 0.5419,
      "step": 465
    },
    {
      "epoch": 0.38955656858682136,
      "grad_norm": 2.1821797937738396,
      "learning_rate": 7.657095097738793e-06,
      "loss": 0.5451,
      "step": 470
    },
    {
      "epoch": 0.3937007874015748,
      "grad_norm": 2.2186951662903063,
      "learning_rate": 7.59549916317367e-06,
      "loss": 0.5347,
      "step": 475
    },
    {
      "epoch": 0.39784500621632823,
      "grad_norm": 2.3212871580558287,
      "learning_rate": 7.533359235265248e-06,
      "loss": 0.5382,
      "step": 480
    },
    {
      "epoch": 0.40198922503108164,
      "grad_norm": 2.1540998726161753,
      "learning_rate": 7.470688337985029e-06,
      "loss": 0.5212,
      "step": 485
    },
    {
      "epoch": 0.40613344384583505,
      "grad_norm": 2.1735121794127132,
      "learning_rate": 7.407499606590934e-06,
      "loss": 0.5312,
      "step": 490
    },
    {
      "epoch": 0.41027766266058846,
      "grad_norm": 2.173386732621735,
      "learning_rate": 7.343806284874268e-06,
      "loss": 0.5273,
      "step": 495
    },
    {
      "epoch": 0.4144218814753419,
      "grad_norm": 2.127262862741766,
      "learning_rate": 7.279621722383939e-06,
      "loss": 0.5145,
      "step": 500
    },
    {
      "epoch": 0.41856610029009533,
      "grad_norm": 2.146219494441019,
      "learning_rate": 7.214959371628522e-06,
      "loss": 0.5225,
      "step": 505
    },
    {
      "epoch": 0.42271031910484874,
      "grad_norm": 2.046689251344904,
      "learning_rate": 7.149832785256718e-06,
      "loss": 0.511,
      "step": 510
    },
    {
      "epoch": 0.42685453791960215,
      "grad_norm": 2.1558137201613423,
      "learning_rate": 7.084255613216855e-06,
      "loss": 0.5103,
      "step": 515
    },
    {
      "epoch": 0.43099875673435556,
      "grad_norm": 2.240438771177761,
      "learning_rate": 7.018241599895974e-06,
      "loss": 0.5187,
      "step": 520
    },
    {
      "epoch": 0.43514297554910897,
      "grad_norm": 2.1251451886790775,
      "learning_rate": 6.95180458123913e-06,
      "loss": 0.509,
      "step": 525
    },
    {
      "epoch": 0.43928719436386243,
      "grad_norm": 2.1265555528429743,
      "learning_rate": 6.8849584818494984e-06,
      "loss": 0.5133,
      "step": 530
    },
    {
      "epoch": 0.44343141317861584,
      "grad_norm": 2.1752821858955143,
      "learning_rate": 6.817717312069913e-06,
      "loss": 0.5102,
      "step": 535
    },
    {
      "epoch": 0.44757563199336925,
      "grad_norm": 1.9742904998884219,
      "learning_rate": 6.750095165046415e-06,
      "loss": 0.4997,
      "step": 540
    },
    {
      "epoch": 0.45171985080812266,
      "grad_norm": 2.131305839198853,
      "learning_rate": 6.682106213774459e-06,
      "loss": 0.5023,
      "step": 545
    },
    {
      "epoch": 0.45586406962287607,
      "grad_norm": 2.31125121725231,
      "learning_rate": 6.6137647081283776e-06,
      "loss": 0.4836,
      "step": 550
    },
    {
      "epoch": 0.46000828843762953,
      "grad_norm": 2.2445463529407705,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.5052,
      "step": 555
    },
    {
      "epoch": 0.46415250725238294,
      "grad_norm": 2.0590014501028078,
      "learning_rate": 6.476081399670212e-06,
      "loss": 0.4769,
      "step": 560
    },
    {
      "epoch": 0.46829672606713635,
      "grad_norm": 2.1235457676935554,
      "learning_rate": 6.406768454044581e-06,
      "loss": 0.482,
      "step": 565
    },
    {
      "epoch": 0.47244094488188976,
      "grad_norm": 2.245932227710593,
      "learning_rate": 6.337160662369519e-06,
      "loss": 0.4797,
      "step": 570
    },
    {
      "epoch": 0.47658516369664317,
      "grad_norm": 2.4691296526493414,
      "learning_rate": 6.267272613813789e-06,
      "loss": 0.48,
      "step": 575
    },
    {
      "epoch": 0.4807293825113966,
      "grad_norm": 2.0821407072798377,
      "learning_rate": 6.19711895628548e-06,
      "loss": 0.4775,
      "step": 580
    },
    {
      "epoch": 0.48487360132615004,
      "grad_norm": 2.2231462845429553,
      "learning_rate": 6.126714393361939e-06,
      "loss": 0.4791,
      "step": 585
    },
    {
      "epoch": 0.48901782014090345,
      "grad_norm": 2.2327431903192756,
      "learning_rate": 6.056073681208038e-06,
      "loss": 0.4687,
      "step": 590
    },
    {
      "epoch": 0.49316203895565686,
      "grad_norm": 2.171485152388387,
      "learning_rate": 5.985211625483403e-06,
      "loss": 0.4773,
      "step": 595
    },
    {
      "epoch": 0.49730625777041026,
      "grad_norm": 2.383751329793211,
      "learning_rate": 5.914143078239293e-06,
      "loss": 0.4841,
      "step": 600
    },
    {
      "epoch": 0.5014504765851637,
      "grad_norm": 2.1730844882589158,
      "learning_rate": 5.842882934805731e-06,
      "loss": 0.479,
      "step": 605
    },
    {
      "epoch": 0.5055946953999171,
      "grad_norm": 2.072456765223728,
      "learning_rate": 5.771446130669589e-06,
      "loss": 0.4683,
      "step": 610
    },
    {
      "epoch": 0.5097389142146705,
      "grad_norm": 2.048482336533146,
      "learning_rate": 5.6998476383442345e-06,
      "loss": 0.4698,
      "step": 615
    },
    {
      "epoch": 0.5138831330294239,
      "grad_norm": 2.3268647107802702,
      "learning_rate": 5.628102464231429e-06,
      "loss": 0.4667,
      "step": 620
    },
    {
      "epoch": 0.5180273518441774,
      "grad_norm": 2.15935166681747,
      "learning_rate": 5.556225645476119e-06,
      "loss": 0.4719,
      "step": 625
    },
    {
      "epoch": 0.5221715706589308,
      "grad_norm": 2.2554049828411835,
      "learning_rate": 5.4842322468147926e-06,
      "loss": 0.4627,
      "step": 630
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 2.16547540035252,
      "learning_rate": 5.412137357418037e-06,
      "loss": 0.4542,
      "step": 635
    },
    {
      "epoch": 0.5304600082884376,
      "grad_norm": 2.205449973964332,
      "learning_rate": 5.339956087727985e-06,
      "loss": 0.4638,
      "step": 640
    },
    {
      "epoch": 0.534604227103191,
      "grad_norm": 2.046592350153715,
      "learning_rate": 5.2677035662913116e-06,
      "loss": 0.4504,
      "step": 645
    },
    {
      "epoch": 0.5387484459179445,
      "grad_norm": 2.1937309067407114,
      "learning_rate": 5.195394936588409e-06,
      "loss": 0.4401,
      "step": 650
    },
    {
      "epoch": 0.5428926647326979,
      "grad_norm": 2.2350791342967797,
      "learning_rate": 5.123045353859465e-06,
      "loss": 0.4508,
      "step": 655
    },
    {
      "epoch": 0.5470368835474513,
      "grad_norm": 2.1319372468329645,
      "learning_rate": 5.050669981928056e-06,
      "loss": 0.4521,
      "step": 660
    },
    {
      "epoch": 0.5511811023622047,
      "grad_norm": 2.130774233274741,
      "learning_rate": 4.978283990022936e-06,
      "loss": 0.4493,
      "step": 665
    },
    {
      "epoch": 0.5553253211769581,
      "grad_norm": 2.008484642041683,
      "learning_rate": 4.905902549598719e-06,
      "loss": 0.4325,
      "step": 670
    },
    {
      "epoch": 0.5594695399917116,
      "grad_norm": 2.0574926912311344,
      "learning_rate": 4.833540831156062e-06,
      "loss": 0.4453,
      "step": 675
    },
    {
      "epoch": 0.563613758806465,
      "grad_norm": 2.2090128918878174,
      "learning_rate": 4.761214001062079e-06,
      "loss": 0.4353,
      "step": 680
    },
    {
      "epoch": 0.5677579776212184,
      "grad_norm": 2.0771401122898197,
      "learning_rate": 4.688937218371592e-06,
      "loss": 0.4322,
      "step": 685
    },
    {
      "epoch": 0.5719021964359718,
      "grad_norm": 2.091649616042253,
      "learning_rate": 4.616725631649938e-06,
      "loss": 0.4353,
      "step": 690
    },
    {
      "epoch": 0.5760464152507252,
      "grad_norm": 2.140897800881621,
      "learning_rate": 4.544594375797969e-06,
      "loss": 0.4286,
      "step": 695
    },
    {
      "epoch": 0.5801906340654787,
      "grad_norm": 2.3556886559066887,
      "learning_rate": 4.472558568879901e-06,
      "loss": 0.4291,
      "step": 700
    },
    {
      "epoch": 0.5843348528802321,
      "grad_norm": 2.061915937329502,
      "learning_rate": 4.400633308954713e-06,
      "loss": 0.4261,
      "step": 705
    },
    {
      "epoch": 0.5884790716949855,
      "grad_norm": 2.1754455938746218,
      "learning_rate": 4.3288336709117246e-06,
      "loss": 0.4263,
      "step": 710
    },
    {
      "epoch": 0.5926232905097389,
      "grad_norm": 2.047663516312869,
      "learning_rate": 4.257174703311032e-06,
      "loss": 0.4326,
      "step": 715
    },
    {
      "epoch": 0.5967675093244923,
      "grad_norm": 2.002919898924837,
      "learning_rate": 4.185671425229477e-06,
      "loss": 0.429,
      "step": 720
    },
    {
      "epoch": 0.6009117281392458,
      "grad_norm": 2.055858095540348,
      "learning_rate": 4.11433882311277e-06,
      "loss": 0.4227,
      "step": 725
    },
    {
      "epoch": 0.6050559469539992,
      "grad_norm": 2.098430357886563,
      "learning_rate": 4.043191847634469e-06,
      "loss": 0.4145,
      "step": 730
    },
    {
      "epoch": 0.6092001657687526,
      "grad_norm": 2.113187936891634,
      "learning_rate": 3.9722454105624545e-06,
      "loss": 0.421,
      "step": 735
    },
    {
      "epoch": 0.613344384583506,
      "grad_norm": 2.1821256779062397,
      "learning_rate": 3.901514381633555e-06,
      "loss": 0.4095,
      "step": 740
    },
    {
      "epoch": 0.6174886033982594,
      "grad_norm": 2.087497370021808,
      "learning_rate": 3.831013585436985e-06,
      "loss": 0.4209,
      "step": 745
    },
    {
      "epoch": 0.6216328222130129,
      "grad_norm": 2.1375456510365654,
      "learning_rate": 3.7607577983072486e-06,
      "loss": 0.4123,
      "step": 750
    },
    {
      "epoch": 0.6257770410277662,
      "grad_norm": 2.1081763007416034,
      "learning_rate": 3.6907617452271394e-06,
      "loss": 0.4049,
      "step": 755
    },
    {
      "epoch": 0.6299212598425197,
      "grad_norm": 2.1448026787250636,
      "learning_rate": 3.621040096741526e-06,
      "loss": 0.4159,
      "step": 760
    },
    {
      "epoch": 0.6340654786572731,
      "grad_norm": 2.1199166483261824,
      "learning_rate": 3.55160746588254e-06,
      "loss": 0.4071,
      "step": 765
    },
    {
      "epoch": 0.6382096974720265,
      "grad_norm": 2.023385824501021,
      "learning_rate": 3.482478405106803e-06,
      "loss": 0.409,
      "step": 770
    },
    {
      "epoch": 0.64235391628678,
      "grad_norm": 2.049301320892124,
      "learning_rate": 3.4136674032453787e-06,
      "loss": 0.4114,
      "step": 775
    },
    {
      "epoch": 0.6464981351015333,
      "grad_norm": 2.049238108716136,
      "learning_rate": 3.34518888246703e-06,
      "loss": 0.4051,
      "step": 780
    },
    {
      "epoch": 0.6506423539162868,
      "grad_norm": 2.1220065162587036,
      "learning_rate": 3.2770571952554674e-06,
      "loss": 0.4012,
      "step": 785
    },
    {
      "epoch": 0.6547865727310402,
      "grad_norm": 2.036047531360177,
      "learning_rate": 3.2092866214011984e-06,
      "loss": 0.4126,
      "step": 790
    },
    {
      "epoch": 0.6589307915457936,
      "grad_norm": 2.1291206967434593,
      "learning_rate": 3.141891365008609e-06,
      "loss": 0.4119,
      "step": 795
    },
    {
      "epoch": 0.6630750103605471,
      "grad_norm": 2.07461368161324,
      "learning_rate": 3.0748855515189104e-06,
      "loss": 0.3945,
      "step": 800
    },
    {
      "epoch": 0.6672192291753004,
      "grad_norm": 1.9714918496227842,
      "learning_rate": 3.00828322474958e-06,
      "loss": 0.3954,
      "step": 805
    },
    {
      "epoch": 0.6713634479900539,
      "grad_norm": 3.9519332717986444,
      "learning_rate": 2.942098343950891e-06,
      "loss": 0.4031,
      "step": 810
    },
    {
      "epoch": 0.6755076668048073,
      "grad_norm": 2.1077325411543293,
      "learning_rate": 2.8763447808801914e-06,
      "loss": 0.3884,
      "step": 815
    },
    {
      "epoch": 0.6796518856195607,
      "grad_norm": 2.025521267691112,
      "learning_rate": 2.8110363168944976e-06,
      "loss": 0.394,
      "step": 820
    },
    {
      "epoch": 0.6837961044343142,
      "grad_norm": 2.130847414216846,
      "learning_rate": 2.7461866400620506e-06,
      "loss": 0.3857,
      "step": 825
    },
    {
      "epoch": 0.6879403232490675,
      "grad_norm": 2.0051322597023566,
      "learning_rate": 2.6818093422934254e-06,
      "loss": 0.3906,
      "step": 830
    },
    {
      "epoch": 0.692084542063821,
      "grad_norm": 2.001609177757284,
      "learning_rate": 2.617917916492776e-06,
      "loss": 0.3921,
      "step": 835
    },
    {
      "epoch": 0.6962287608785744,
      "grad_norm": 2.072211567891342,
      "learning_rate": 2.5545257537298497e-06,
      "loss": 0.3982,
      "step": 840
    },
    {
      "epoch": 0.7003729796933278,
      "grad_norm": 1.9935034929673958,
      "learning_rate": 2.491646140433346e-06,
      "loss": 0.3798,
      "step": 845
    },
    {
      "epoch": 0.7045171985080813,
      "grad_norm": 1.9339328870220762,
      "learning_rate": 2.4292922556061877e-06,
      "loss": 0.3853,
      "step": 850
    },
    {
      "epoch": 0.7086614173228346,
      "grad_norm": 2.1038621746542767,
      "learning_rate": 2.367477168063326e-06,
      "loss": 0.3817,
      "step": 855
    },
    {
      "epoch": 0.7128056361375881,
      "grad_norm": 2.0504707093576777,
      "learning_rate": 2.3062138336926406e-06,
      "loss": 0.3828,
      "step": 860
    },
    {
      "epoch": 0.7169498549523415,
      "grad_norm": 2.128613755893242,
      "learning_rate": 2.245515092739488e-06,
      "loss": 0.3854,
      "step": 865
    },
    {
      "epoch": 0.7210940737670949,
      "grad_norm": 2.021286433659744,
      "learning_rate": 2.185393667115513e-06,
      "loss": 0.3767,
      "step": 870
    },
    {
      "epoch": 0.7252382925818484,
      "grad_norm": 2.175790029610662,
      "learning_rate": 2.125862157732245e-06,
      "loss": 0.3752,
      "step": 875
    },
    {
      "epoch": 0.7293825113966017,
      "grad_norm": 2.055231327953937,
      "learning_rate": 2.066933041860059e-06,
      "loss": 0.3797,
      "step": 880
    },
    {
      "epoch": 0.7335267302113552,
      "grad_norm": 2.0609885500688745,
      "learning_rate": 2.0086186705130545e-06,
      "loss": 0.3798,
      "step": 885
    },
    {
      "epoch": 0.7376709490261085,
      "grad_norm": 1.9460493213850336,
      "learning_rate": 1.9509312658603954e-06,
      "loss": 0.3716,
      "step": 890
    },
    {
      "epoch": 0.741815167840862,
      "grad_norm": 2.107550534612032,
      "learning_rate": 1.8938829186646484e-06,
      "loss": 0.3812,
      "step": 895
    },
    {
      "epoch": 0.7459593866556155,
      "grad_norm": 1.9693264223542786,
      "learning_rate": 1.8374855857476687e-06,
      "loss": 0.371,
      "step": 900
    },
    {
      "epoch": 0.7501036054703688,
      "grad_norm": 2.0116271854033188,
      "learning_rate": 1.7817510874845585e-06,
      "loss": 0.3705,
      "step": 905
    },
    {
      "epoch": 0.7542478242851223,
      "grad_norm": 1.9693053868651615,
      "learning_rate": 1.7266911053262196e-06,
      "loss": 0.3676,
      "step": 910
    },
    {
      "epoch": 0.7583920430998756,
      "grad_norm": 2.1020529435678186,
      "learning_rate": 1.6723171793510363e-06,
      "loss": 0.3737,
      "step": 915
    },
    {
      "epoch": 0.7625362619146291,
      "grad_norm": 2.093530940289316,
      "learning_rate": 1.6186407058461622e-06,
      "loss": 0.3797,
      "step": 920
    },
    {
      "epoch": 0.7666804807293826,
      "grad_norm": 2.0472605324094326,
      "learning_rate": 1.5656729349189742e-06,
      "loss": 0.3716,
      "step": 925
    },
    {
      "epoch": 0.7708246995441359,
      "grad_norm": 2.0335898500157925,
      "learning_rate": 1.5134249681391416e-06,
      "loss": 0.3672,
      "step": 930
    },
    {
      "epoch": 0.7749689183588894,
      "grad_norm": 1.9454457763257478,
      "learning_rate": 1.4619077562118477e-06,
      "loss": 0.368,
      "step": 935
    },
    {
      "epoch": 0.7791131371736427,
      "grad_norm": 2.057587525250662,
      "learning_rate": 1.411132096682606e-06,
      "loss": 0.3694,
      "step": 940
    },
    {
      "epoch": 0.7832573559883962,
      "grad_norm": 2.1292730946235934,
      "learning_rate": 1.3611086316742057e-06,
      "loss": 0.3595,
      "step": 945
    },
    {
      "epoch": 0.7874015748031497,
      "grad_norm": 2.022478659877928,
      "learning_rate": 1.3118478456562073e-06,
      "loss": 0.3635,
      "step": 950
    },
    {
      "epoch": 0.791545793617903,
      "grad_norm": 2.0367686920587857,
      "learning_rate": 1.2633600632474962e-06,
      "loss": 0.3593,
      "step": 955
    },
    {
      "epoch": 0.7956900124326565,
      "grad_norm": 2.1462356201632065,
      "learning_rate": 1.2156554470523364e-06,
      "loss": 0.3589,
      "step": 960
    },
    {
      "epoch": 0.7998342312474098,
      "grad_norm": 2.068674533072379,
      "learning_rate": 1.1687439955303764e-06,
      "loss": 0.3645,
      "step": 965
    },
    {
      "epoch": 0.8039784500621633,
      "grad_norm": 2.107218769695033,
      "learning_rate": 1.1226355409010686e-06,
      "loss": 0.3629,
      "step": 970
    },
    {
      "epoch": 0.8081226688769167,
      "grad_norm": 2.1600739342990427,
      "learning_rate": 1.0773397470829145e-06,
      "loss": 0.3509,
      "step": 975
    },
    {
      "epoch": 0.8122668876916701,
      "grad_norm": 2.056346466443948,
      "learning_rate": 1.032866107667999e-06,
      "loss": 0.3642,
      "step": 980
    },
    {
      "epoch": 0.8164111065064236,
      "grad_norm": 2.055999415717534,
      "learning_rate": 9.892239439322243e-07,
      "loss": 0.3608,
      "step": 985
    },
    {
      "epoch": 0.8205553253211769,
      "grad_norm": 1.9865625625009857,
      "learning_rate": 9.464224028816427e-07,
      "loss": 0.352,
      "step": 990
    },
    {
      "epoch": 0.8246995441359304,
      "grad_norm": 2.090284485836539,
      "learning_rate": 9.044704553353323e-07,
      "loss": 0.3552,
      "step": 995
    },
    {
      "epoch": 0.8288437629506838,
      "grad_norm": 2.0868083838688585,
      "learning_rate": 8.633768940451981e-07,
      "loss": 0.3494,
      "step": 1000
    },
    {
      "epoch": 0.8329879817654372,
      "grad_norm": 2.0849345095616507,
      "learning_rate": 8.231503318530814e-07,
      "loss": 0.3503,
      "step": 1005
    },
    {
      "epoch": 0.8371322005801907,
      "grad_norm": 2.036555088205375,
      "learning_rate": 7.837991998855899e-07,
      "loss": 0.3532,
      "step": 1010
    },
    {
      "epoch": 0.841276419394944,
      "grad_norm": 1.9647600324287684,
      "learning_rate": 7.453317457870096e-07,
      "loss": 0.3526,
      "step": 1015
    },
    {
      "epoch": 0.8454206382096975,
      "grad_norm": 1.9946404926738106,
      "learning_rate": 7.077560319906696e-07,
      "loss": 0.3507,
      "step": 1020
    },
    {
      "epoch": 0.8495648570244508,
      "grad_norm": 2.0929585255558805,
      "learning_rate": 6.710799340291341e-07,
      "loss": 0.3565,
      "step": 1025
    },
    {
      "epoch": 0.8537090758392043,
      "grad_norm": 1.9862050269807172,
      "learning_rate": 6.353111388835564e-07,
      "loss": 0.3504,
      "step": 1030
    },
    {
      "epoch": 0.8578532946539578,
      "grad_norm": 2.027382864636772,
      "learning_rate": 6.00457143372557e-07,
      "loss": 0.352,
      "step": 1035
    },
    {
      "epoch": 0.8619975134687111,
      "grad_norm": 2.090716583561947,
      "learning_rate": 5.665252525809583e-07,
      "loss": 0.3425,
      "step": 1040
    },
    {
      "epoch": 0.8661417322834646,
      "grad_norm": 2.06905352203202,
      "learning_rate": 5.335225783287051e-07,
      "loss": 0.3407,
      "step": 1045
    },
    {
      "epoch": 0.8702859510982179,
      "grad_norm": 2.125696851804249,
      "learning_rate": 5.014560376802913e-07,
      "loss": 0.3487,
      "step": 1050
    },
    {
      "epoch": 0.8744301699129714,
      "grad_norm": 1.9923666150875952,
      "learning_rate": 4.703323514950042e-07,
      "loss": 0.3409,
      "step": 1055
    },
    {
      "epoch": 0.8785743887277249,
      "grad_norm": 2.0327181763907736,
      "learning_rate": 4.401580430182928e-07,
      "loss": 0.3506,
      "step": 1060
    },
    {
      "epoch": 0.8827186075424782,
      "grad_norm": 2.255853441666556,
      "learning_rate": 4.1093943651455305e-07,
      "loss": 0.3459,
      "step": 1065
    },
    {
      "epoch": 0.8868628263572317,
      "grad_norm": 2.0691292772221663,
      "learning_rate": 3.826826559416219e-07,
      "loss": 0.3483,
      "step": 1070
    },
    {
      "epoch": 0.891007045171985,
      "grad_norm": 2.0290765748725605,
      "learning_rate": 3.5539362366724784e-07,
      "loss": 0.3501,
      "step": 1075
    },
    {
      "epoch": 0.8951512639867385,
      "grad_norm": 2.1150371568126975,
      "learning_rate": 3.290780592278148e-07,
      "loss": 0.3442,
      "step": 1080
    },
    {
      "epoch": 0.899295482801492,
      "grad_norm": 1.9671376281528878,
      "learning_rate": 3.0374147812958387e-07,
      "loss": 0.3431,
      "step": 1085
    },
    {
      "epoch": 0.9034397016162453,
      "grad_norm": 2.046538088407744,
      "learning_rate": 2.7938919069268654e-07,
      "loss": 0.337,
      "step": 1090
    },
    {
      "epoch": 0.9075839204309988,
      "grad_norm": 2.0585547681661756,
      "learning_rate": 2.5602630093813253e-07,
      "loss": 0.3452,
      "step": 1095
    },
    {
      "epoch": 0.9117281392457521,
      "grad_norm": 2.080490788044654,
      "learning_rate": 2.3365770551805223e-07,
      "loss": 0.3399,
      "step": 1100
    },
    {
      "epoch": 0.9158723580605056,
      "grad_norm": 2.040411049775766,
      "learning_rate": 2.1228809268940164e-07,
      "loss": 0.3393,
      "step": 1105
    },
    {
      "epoch": 0.9200165768752591,
      "grad_norm": 2.0655064123833498,
      "learning_rate": 1.919219413313478e-07,
      "loss": 0.3395,
      "step": 1110
    },
    {
      "epoch": 0.9241607956900124,
      "grad_norm": 2.140596373973613,
      "learning_rate": 1.725635200065323e-07,
      "loss": 0.3405,
      "step": 1115
    },
    {
      "epoch": 0.9283050145047659,
      "grad_norm": 1.986374862328418,
      "learning_rate": 1.5421688606642392e-07,
      "loss": 0.3428,
      "step": 1120
    },
    {
      "epoch": 0.9324492333195192,
      "grad_norm": 2.040129619292376,
      "learning_rate": 1.3688588480092913e-07,
      "loss": 0.3446,
      "step": 1125
    },
    {
      "epoch": 0.9365934521342727,
      "grad_norm": 2.01389490087078,
      "learning_rate": 1.205741486324552e-07,
      "loss": 0.3397,
      "step": 1130
    },
    {
      "epoch": 0.9407376709490262,
      "grad_norm": 2.1289409520473326,
      "learning_rate": 1.0528509635458873e-07,
      "loss": 0.3401,
      "step": 1135
    },
    {
      "epoch": 0.9448818897637795,
      "grad_norm": 2.0835955722730697,
      "learning_rate": 9.102193241554757e-08,
      "loss": 0.3415,
      "step": 1140
    },
    {
      "epoch": 0.949026108578533,
      "grad_norm": 2.122449489250868,
      "learning_rate": 7.778764624655433e-08,
      "loss": 0.3437,
      "step": 1145
    },
    {
      "epoch": 0.9531703273932863,
      "grad_norm": 2.0577943752263836,
      "learning_rate": 6.558501163527964e-08,
      "loss": 0.3388,
      "step": 1150
    },
    {
      "epoch": 0.9573145462080398,
      "grad_norm": 2.126069664831503,
      "learning_rate": 5.44165861444812e-08,
      "loss": 0.337,
      "step": 1155
    },
    {
      "epoch": 0.9614587650227931,
      "grad_norm": 1.998215049010736,
      "learning_rate": 4.428471057596362e-08,
      "loss": 0.3378,
      "step": 1160
    },
    {
      "epoch": 0.9656029838375466,
      "grad_norm": 2.0929533756003904,
      "learning_rate": 3.519150847996422e-08,
      "loss": 0.3379,
      "step": 1165
    },
    {
      "epoch": 0.9697472026523001,
      "grad_norm": 2.0561899209040186,
      "learning_rate": 2.713888571007739e-08,
      "loss": 0.3388,
      "step": 1170
    },
    {
      "epoch": 0.9738914214670534,
      "grad_norm": 1.9664762265851157,
      "learning_rate": 2.012853002380466e-08,
      "loss": 0.3396,
      "step": 1175
    },
    {
      "epoch": 0.9780356402818069,
      "grad_norm": 1.999931793720276,
      "learning_rate": 1.4161910728816009e-08,
      "loss": 0.3408,
      "step": 1180
    },
    {
      "epoch": 0.9821798590965602,
      "grad_norm": 2.099348366631809,
      "learning_rate": 9.240278374995637e-09,
      "loss": 0.3438,
      "step": 1185
    },
    {
      "epoch": 0.9863240779113137,
      "grad_norm": 2.0961585662402937,
      "learning_rate": 5.364664492337746e-09,
      "loss": 0.3382,
      "step": 1190
    },
    {
      "epoch": 0.9904682967260672,
      "grad_norm": 2.0713909816710956,
      "learning_rate": 2.5358813747500266e-09,
      "loss": 0.3392,
      "step": 1195
    },
    {
      "epoch": 0.9946125155408205,
      "grad_norm": 2.0497170104333793,
      "learning_rate": 7.545219097987444e-10,
      "loss": 0.333,
      "step": 1200
    },
    {
      "epoch": 0.998756734355574,
      "grad_norm": 2.039086392116031,
      "learning_rate": 2.0959454449243076e-11,
      "loss": 0.3409,
      "step": 1205
    },
    {
      "epoch": 0.9995855781185247,
      "eval_loss": 0.3220805823802948,
      "eval_runtime": 1.1842,
      "eval_samples_per_second": 2.533,
      "eval_steps_per_second": 0.844,
      "step": 1206
    },
    {
      "epoch": 0.9995855781185247,
      "step": 1206,
      "total_flos": 252407174922240.0,
      "train_loss": 0.5270945630461027,
      "train_runtime": 23848.9709,
      "train_samples_per_second": 1.619,
      "train_steps_per_second": 0.051
    }
  ],
  "logging_steps": 5,
  "max_steps": 1206,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 252407174922240.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}