|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 200, |
|
"global_step": 4852, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00020610057708161583, |
|
"grad_norm": 0.7938007287198758, |
|
"learning_rate": 4.1152263374485604e-07, |
|
"loss": 1.2403, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.001030502885408079, |
|
"grad_norm": 0.9829139995457005, |
|
"learning_rate": 2.05761316872428e-06, |
|
"loss": 1.5592, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.002061005770816158, |
|
"grad_norm": 1.3104221596563024, |
|
"learning_rate": 4.11522633744856e-06, |
|
"loss": 1.9165, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0030915086562242375, |
|
"grad_norm": 0.8263152262533461, |
|
"learning_rate": 6.172839506172839e-06, |
|
"loss": 1.6148, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.004122011541632316, |
|
"grad_norm": 1.2188669223330737, |
|
"learning_rate": 8.23045267489712e-06, |
|
"loss": 1.7383, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.005152514427040396, |
|
"grad_norm": 1.3669452124425536, |
|
"learning_rate": 1.02880658436214e-05, |
|
"loss": 1.8777, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.006183017312448475, |
|
"grad_norm": 0.6877557296913395, |
|
"learning_rate": 1.2345679012345678e-05, |
|
"loss": 1.3855, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.007213520197856554, |
|
"grad_norm": 0.6292369266751937, |
|
"learning_rate": 1.440329218106996e-05, |
|
"loss": 1.7622, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.008244023083264633, |
|
"grad_norm": 0.43117531258376596, |
|
"learning_rate": 1.646090534979424e-05, |
|
"loss": 1.4525, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.009274525968672712, |
|
"grad_norm": 0.3651450490233128, |
|
"learning_rate": 1.8518518518518518e-05, |
|
"loss": 1.546, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.010305028854080791, |
|
"grad_norm": 0.4844380918063832, |
|
"learning_rate": 2.05761316872428e-05, |
|
"loss": 1.6499, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.01133553173948887, |
|
"grad_norm": 0.33393658192356523, |
|
"learning_rate": 2.2633744855967078e-05, |
|
"loss": 1.1817, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.01236603462489695, |
|
"grad_norm": 0.34761501749088797, |
|
"learning_rate": 2.4691358024691357e-05, |
|
"loss": 1.5655, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.01339653751030503, |
|
"grad_norm": 0.3221134242168732, |
|
"learning_rate": 2.6748971193415638e-05, |
|
"loss": 1.2608, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.014427040395713108, |
|
"grad_norm": 0.31264722399213424, |
|
"learning_rate": 2.880658436213992e-05, |
|
"loss": 1.4104, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.015457543281121188, |
|
"grad_norm": 0.3955664926406516, |
|
"learning_rate": 3.08641975308642e-05, |
|
"loss": 1.4622, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.016488046166529265, |
|
"grad_norm": 0.3149436282257214, |
|
"learning_rate": 3.292181069958848e-05, |
|
"loss": 1.159, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.017518549051937345, |
|
"grad_norm": 0.2854140231011414, |
|
"learning_rate": 3.497942386831276e-05, |
|
"loss": 1.4829, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.018549051937345424, |
|
"grad_norm": 0.3326334208099821, |
|
"learning_rate": 3.7037037037037037e-05, |
|
"loss": 1.1516, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.019579554822753503, |
|
"grad_norm": 0.27192427116328566, |
|
"learning_rate": 3.909465020576132e-05, |
|
"loss": 1.3785, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.020610057708161583, |
|
"grad_norm": 0.3152208635246729, |
|
"learning_rate": 4.11522633744856e-05, |
|
"loss": 1.3886, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.021640560593569662, |
|
"grad_norm": 0.27663290557612785, |
|
"learning_rate": 4.3209876543209875e-05, |
|
"loss": 1.1185, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.02267106347897774, |
|
"grad_norm": 0.27185356829855356, |
|
"learning_rate": 4.5267489711934157e-05, |
|
"loss": 1.4773, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.02370156636438582, |
|
"grad_norm": 0.2935081378811297, |
|
"learning_rate": 4.732510288065844e-05, |
|
"loss": 1.261, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.0247320692497939, |
|
"grad_norm": 0.29236576081560334, |
|
"learning_rate": 4.938271604938271e-05, |
|
"loss": 1.3387, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.02576257213520198, |
|
"grad_norm": 0.38637732181078555, |
|
"learning_rate": 5.1440329218106995e-05, |
|
"loss": 1.4305, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.02679307502061006, |
|
"grad_norm": 0.2982040676995298, |
|
"learning_rate": 5.3497942386831277e-05, |
|
"loss": 1.0162, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.027823577906018138, |
|
"grad_norm": 0.2699066491657892, |
|
"learning_rate": 5.555555555555556e-05, |
|
"loss": 1.4066, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.028854080791426217, |
|
"grad_norm": 0.33203812944453787, |
|
"learning_rate": 5.761316872427984e-05, |
|
"loss": 1.1751, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.029884583676834296, |
|
"grad_norm": 0.3312647784314266, |
|
"learning_rate": 5.9670781893004115e-05, |
|
"loss": 1.2269, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.030915086562242376, |
|
"grad_norm": 0.541956195146165, |
|
"learning_rate": 6.17283950617284e-05, |
|
"loss": 1.354, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.031945589447650455, |
|
"grad_norm": 0.3172355233573687, |
|
"learning_rate": 6.378600823045267e-05, |
|
"loss": 1.0666, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.03297609233305853, |
|
"grad_norm": 0.32272337262102746, |
|
"learning_rate": 6.584362139917696e-05, |
|
"loss": 1.4928, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.03400659521846661, |
|
"grad_norm": 0.4252299718796606, |
|
"learning_rate": 6.790123456790123e-05, |
|
"loss": 1.1979, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.03503709810387469, |
|
"grad_norm": 0.2816942875302398, |
|
"learning_rate": 6.995884773662552e-05, |
|
"loss": 1.3223, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.03606760098928277, |
|
"grad_norm": 0.34311165874954624, |
|
"learning_rate": 7.20164609053498e-05, |
|
"loss": 1.4296, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.03709810387469085, |
|
"grad_norm": 0.297834971078222, |
|
"learning_rate": 7.407407407407407e-05, |
|
"loss": 1.087, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.03812860676009893, |
|
"grad_norm": 0.2602802208757066, |
|
"learning_rate": 7.613168724279836e-05, |
|
"loss": 1.4754, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.039159109645507006, |
|
"grad_norm": 0.40680201193562965, |
|
"learning_rate": 7.818930041152264e-05, |
|
"loss": 1.1889, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.04018961253091509, |
|
"grad_norm": 0.45086574163739, |
|
"learning_rate": 8.024691358024692e-05, |
|
"loss": 1.2851, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.041220115416323165, |
|
"grad_norm": 0.4308650820975275, |
|
"learning_rate": 8.23045267489712e-05, |
|
"loss": 1.3478, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.041220115416323165, |
|
"eval_loss": 1.2309638261795044, |
|
"eval_runtime": 2469.2141, |
|
"eval_samples_per_second": 3.24, |
|
"eval_steps_per_second": 0.202, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.04225061830173125, |
|
"grad_norm": 0.37845098126001203, |
|
"learning_rate": 8.436213991769549e-05, |
|
"loss": 1.0707, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.043281121187139324, |
|
"grad_norm": 0.2851215824173796, |
|
"learning_rate": 8.641975308641975e-05, |
|
"loss": 1.4131, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.044311624072547406, |
|
"grad_norm": 0.3309919261494515, |
|
"learning_rate": 8.847736625514404e-05, |
|
"loss": 1.1951, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.04534212695795548, |
|
"grad_norm": 0.3065911094811643, |
|
"learning_rate": 9.053497942386831e-05, |
|
"loss": 1.2554, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.04637262984336356, |
|
"grad_norm": 0.32586015367310234, |
|
"learning_rate": 9.25925925925926e-05, |
|
"loss": 1.4263, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.04740313272877164, |
|
"grad_norm": 0.277075233563208, |
|
"learning_rate": 9.465020576131688e-05, |
|
"loss": 1.0216, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.04843363561417972, |
|
"grad_norm": 0.2769072933842902, |
|
"learning_rate": 9.670781893004116e-05, |
|
"loss": 1.3689, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.0494641384995878, |
|
"grad_norm": 0.27922329154610587, |
|
"learning_rate": 9.876543209876543e-05, |
|
"loss": 1.2181, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.050494641384995875, |
|
"grad_norm": 0.30982282181292814, |
|
"learning_rate": 0.00010082304526748971, |
|
"loss": 1.2183, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.05152514427040396, |
|
"grad_norm": 0.33257211954241994, |
|
"learning_rate": 0.00010288065843621399, |
|
"loss": 1.4305, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.052555647155812034, |
|
"grad_norm": 0.2838288504939334, |
|
"learning_rate": 0.00010493827160493828, |
|
"loss": 1.0051, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.05358615004122012, |
|
"grad_norm": 0.28623162042613864, |
|
"learning_rate": 0.00010699588477366255, |
|
"loss": 1.436, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.05461665292662819, |
|
"grad_norm": 0.2720219498268959, |
|
"learning_rate": 0.00010905349794238684, |
|
"loss": 1.1381, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.055647155812036275, |
|
"grad_norm": 0.30404224920143047, |
|
"learning_rate": 0.00011111111111111112, |
|
"loss": 1.1905, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.05667765869744435, |
|
"grad_norm": 0.3469618797327675, |
|
"learning_rate": 0.0001131687242798354, |
|
"loss": 1.3797, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.057708161582852434, |
|
"grad_norm": 0.30848130199190793, |
|
"learning_rate": 0.00011522633744855968, |
|
"loss": 1.0163, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.05873866446826051, |
|
"grad_norm": 0.2587435053896794, |
|
"learning_rate": 0.00011728395061728397, |
|
"loss": 1.401, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.05976916735366859, |
|
"grad_norm": 0.30665664697045764, |
|
"learning_rate": 0.00011934156378600823, |
|
"loss": 1.197, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.06079967023907667, |
|
"grad_norm": 0.28688303187237163, |
|
"learning_rate": 0.0001213991769547325, |
|
"loss": 1.2817, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.06183017312448475, |
|
"grad_norm": 0.32502880249151705, |
|
"learning_rate": 0.0001234567901234568, |
|
"loss": 1.388, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.06286067600989283, |
|
"grad_norm": 0.30699533228385806, |
|
"learning_rate": 0.00012551440329218108, |
|
"loss": 1.0403, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.06389117889530091, |
|
"grad_norm": 0.29902009304688737, |
|
"learning_rate": 0.00012757201646090534, |
|
"loss": 1.3604, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.06492168178070899, |
|
"grad_norm": 0.26224004465269324, |
|
"learning_rate": 0.00012962962962962963, |
|
"loss": 1.12, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.06595218466611706, |
|
"grad_norm": 0.2697856637362638, |
|
"learning_rate": 0.00013168724279835392, |
|
"loss": 1.295, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.06698268755152514, |
|
"grad_norm": 0.3118309921774858, |
|
"learning_rate": 0.0001337448559670782, |
|
"loss": 1.3498, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.06801319043693323, |
|
"grad_norm": 0.3774735054690596, |
|
"learning_rate": 0.00013580246913580247, |
|
"loss": 1.1412, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.0690436933223413, |
|
"grad_norm": 0.27963249445347155, |
|
"learning_rate": 0.00013786008230452676, |
|
"loss": 1.4175, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.07007419620774938, |
|
"grad_norm": 0.25343833022804746, |
|
"learning_rate": 0.00013991769547325105, |
|
"loss": 1.1228, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.07110469909315746, |
|
"grad_norm": 0.2676785839608282, |
|
"learning_rate": 0.00014197530864197534, |
|
"loss": 1.2235, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.07213520197856554, |
|
"grad_norm": 0.33130141183815665, |
|
"learning_rate": 0.0001440329218106996, |
|
"loss": 1.3623, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.07316570486397361, |
|
"grad_norm": 0.2676095204791478, |
|
"learning_rate": 0.00014609053497942386, |
|
"loss": 1.0355, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.0741962077493817, |
|
"grad_norm": 0.31354711887470305, |
|
"learning_rate": 0.00014814814814814815, |
|
"loss": 1.3388, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.07522671063478978, |
|
"grad_norm": 0.26048475689786116, |
|
"learning_rate": 0.00015020576131687243, |
|
"loss": 1.1111, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.07625721352019786, |
|
"grad_norm": 0.3339375055235073, |
|
"learning_rate": 0.00015226337448559672, |
|
"loss": 1.1734, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.07728771640560593, |
|
"grad_norm": 0.327943444067688, |
|
"learning_rate": 0.00015432098765432098, |
|
"loss": 1.3208, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.07831821929101401, |
|
"grad_norm": 0.2830129836407308, |
|
"learning_rate": 0.00015637860082304527, |
|
"loss": 1.0787, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.0793487221764221, |
|
"grad_norm": 0.2751640321887791, |
|
"learning_rate": 0.00015843621399176956, |
|
"loss": 1.3772, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.08037922506183018, |
|
"grad_norm": 0.27545532948015855, |
|
"learning_rate": 0.00016049382716049385, |
|
"loss": 1.1672, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.08140972794723825, |
|
"grad_norm": 0.300224560861019, |
|
"learning_rate": 0.0001625514403292181, |
|
"loss": 1.3224, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.08244023083264633, |
|
"grad_norm": 0.3227759504569782, |
|
"learning_rate": 0.0001646090534979424, |
|
"loss": 1.3495, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.08244023083264633, |
|
"eval_loss": 1.1826071739196777, |
|
"eval_runtime": 2476.8142, |
|
"eval_samples_per_second": 3.23, |
|
"eval_steps_per_second": 0.202, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.08347073371805441, |
|
"grad_norm": 0.27853687771921737, |
|
"learning_rate": 0.0001666666666666667, |
|
"loss": 1.1022, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.0845012366034625, |
|
"grad_norm": 0.2685951286639512, |
|
"learning_rate": 0.00016872427983539098, |
|
"loss": 1.4061, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.08553173948887056, |
|
"grad_norm": 0.2608899320776935, |
|
"learning_rate": 0.00017078189300411524, |
|
"loss": 1.1174, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.08656224237427865, |
|
"grad_norm": 0.3397550472709015, |
|
"learning_rate": 0.0001728395061728395, |
|
"loss": 1.2198, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.08759274525968673, |
|
"grad_norm": 0.25033631170694937, |
|
"learning_rate": 0.0001748971193415638, |
|
"loss": 1.4029, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.08862324814509481, |
|
"grad_norm": 0.3049472388523693, |
|
"learning_rate": 0.00017695473251028808, |
|
"loss": 1.0265, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.08965375103050288, |
|
"grad_norm": 0.29598713943285077, |
|
"learning_rate": 0.00017901234567901234, |
|
"loss": 1.3719, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.09068425391591096, |
|
"grad_norm": 0.2656526738754943, |
|
"learning_rate": 0.00018106995884773663, |
|
"loss": 1.1238, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.09171475680131905, |
|
"grad_norm": 0.2546218728984369, |
|
"learning_rate": 0.00018312757201646091, |
|
"loss": 1.1853, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.09274525968672712, |
|
"grad_norm": 0.309485029099833, |
|
"learning_rate": 0.0001851851851851852, |
|
"loss": 1.3064, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.0937757625721352, |
|
"grad_norm": 0.28184488141448555, |
|
"learning_rate": 0.00018724279835390946, |
|
"loss": 1.0605, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.09480626545754328, |
|
"grad_norm": 0.2913075285039461, |
|
"learning_rate": 0.00018930041152263375, |
|
"loss": 1.4311, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.09583676834295136, |
|
"grad_norm": 0.7186836779179142, |
|
"learning_rate": 0.00019135802469135804, |
|
"loss": 1.1263, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.09686727122835943, |
|
"grad_norm": 0.29590695855715343, |
|
"learning_rate": 0.00019341563786008233, |
|
"loss": 1.2589, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.09789777411376752, |
|
"grad_norm": 0.31832697133586063, |
|
"learning_rate": 0.0001954732510288066, |
|
"loss": 1.395, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.0989282769991756, |
|
"grad_norm": 0.23627896192430478, |
|
"learning_rate": 0.00019753086419753085, |
|
"loss": 1.0703, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.09995877988458368, |
|
"grad_norm": 0.27354848483986705, |
|
"learning_rate": 0.00019958847736625514, |
|
"loss": 1.3205, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.10098928276999175, |
|
"grad_norm": 0.4895153568779187, |
|
"learning_rate": 0.00019999958578867407, |
|
"loss": 1.1488, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.10201978565539983, |
|
"grad_norm": 0.2811706808521417, |
|
"learning_rate": 0.00019999790306104336, |
|
"loss": 1.2442, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.10305028854080792, |
|
"grad_norm": 0.2572161259717821, |
|
"learning_rate": 0.0001999949259506647, |
|
"loss": 1.3355, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.104080791426216, |
|
"grad_norm": 0.41643641721819225, |
|
"learning_rate": 0.00019999065449607402, |
|
"loss": 1.0238, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.10511129431162407, |
|
"grad_norm": 0.2871689949893079, |
|
"learning_rate": 0.00019998508875256158, |
|
"loss": 1.3608, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.10614179719703215, |
|
"grad_norm": 0.3219574868373838, |
|
"learning_rate": 0.0001999782287921708, |
|
"loss": 1.0774, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.10717230008244023, |
|
"grad_norm": 0.23385146626580391, |
|
"learning_rate": 0.00019997007470369773, |
|
"loss": 1.184, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.10820280296784832, |
|
"grad_norm": 0.3585994112684759, |
|
"learning_rate": 0.0001999606265926897, |
|
"loss": 1.3426, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.10923330585325638, |
|
"grad_norm": 0.2894131584925935, |
|
"learning_rate": 0.000199949884581444, |
|
"loss": 1.099, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.11026380873866447, |
|
"grad_norm": 0.2824735567473504, |
|
"learning_rate": 0.00019993784880900623, |
|
"loss": 1.3997, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.11129431162407255, |
|
"grad_norm": 0.23322094258169807, |
|
"learning_rate": 0.0001999245194311687, |
|
"loss": 1.1019, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.11232481450948063, |
|
"grad_norm": 0.30614162605617107, |
|
"learning_rate": 0.00019990989662046818, |
|
"loss": 1.2576, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.1133553173948887, |
|
"grad_norm": 0.3389548425122287, |
|
"learning_rate": 0.0001998939805661837, |
|
"loss": 1.301, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.11438582028029678, |
|
"grad_norm": 0.25179179489194975, |
|
"learning_rate": 0.00019987677147433432, |
|
"loss": 0.9871, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.11541632316570487, |
|
"grad_norm": 0.24522921674032325, |
|
"learning_rate": 0.0001998582695676762, |
|
"loss": 1.4205, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.11644682605111294, |
|
"grad_norm": 0.2528881733347297, |
|
"learning_rate": 0.00019983847508569987, |
|
"loss": 1.1283, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.11747732893652102, |
|
"grad_norm": 0.28075131598249065, |
|
"learning_rate": 0.00019981738828462703, |
|
"loss": 1.2467, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.1185078318219291, |
|
"grad_norm": 0.3223992578484773, |
|
"learning_rate": 0.00019979500943740735, |
|
"loss": 1.3673, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.11953833470733718, |
|
"grad_norm": 0.2804260590851507, |
|
"learning_rate": 0.00019977133883371478, |
|
"loss": 1.1133, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.12056883759274525, |
|
"grad_norm": 0.25183674337041767, |
|
"learning_rate": 0.00019974637677994404, |
|
"loss": 1.3493, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.12159934047815334, |
|
"grad_norm": 0.23352143639236672, |
|
"learning_rate": 0.00019972012359920638, |
|
"loss": 1.1183, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.12262984336356142, |
|
"grad_norm": 0.2597060461259452, |
|
"learning_rate": 0.0001996925796313256, |
|
"loss": 1.2021, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.1236603462489695, |
|
"grad_norm": 0.31429525704141625, |
|
"learning_rate": 0.00019966374523283347, |
|
"loss": 1.3753, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.1236603462489695, |
|
"eval_loss": 1.1556562185287476, |
|
"eval_runtime": 2465.6561, |
|
"eval_samples_per_second": 3.245, |
|
"eval_steps_per_second": 0.203, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.12469084913437757, |
|
"grad_norm": 0.23970501230526484, |
|
"learning_rate": 0.00019963362077696537, |
|
"loss": 0.9978, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.12572135201978565, |
|
"grad_norm": 0.24126751266896648, |
|
"learning_rate": 0.00019960220665365518, |
|
"loss": 1.3576, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.12675185490519372, |
|
"grad_norm": 0.29806682020411596, |
|
"learning_rate": 0.0001995695032695305, |
|
"loss": 1.0721, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.12778235779060182, |
|
"grad_norm": 0.3121199170110613, |
|
"learning_rate": 0.0001995355110479071, |
|
"loss": 1.2159, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.1288128606760099, |
|
"grad_norm": 0.3534465377015386, |
|
"learning_rate": 0.00019950023042878366, |
|
"loss": 1.3058, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.12984336356141798, |
|
"grad_norm": 0.23203831442914624, |
|
"learning_rate": 0.00019946366186883604, |
|
"loss": 0.9527, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.13087386644682605, |
|
"grad_norm": 0.2720965062058241, |
|
"learning_rate": 0.00019942580584141127, |
|
"loss": 1.3558, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.13190436933223412, |
|
"grad_norm": 0.2629592480398592, |
|
"learning_rate": 0.0001993866628365215, |
|
"loss": 1.1318, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.13293487221764222, |
|
"grad_norm": 0.2920226997136243, |
|
"learning_rate": 0.00019934623336083772, |
|
"loss": 1.122, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.1339653751030503, |
|
"grad_norm": 0.27168430341439026, |
|
"learning_rate": 0.00019930451793768298, |
|
"loss": 1.3241, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.13499587798845836, |
|
"grad_norm": 0.2845184130718667, |
|
"learning_rate": 0.00019926151710702588, |
|
"loss": 0.9734, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.13602638087386645, |
|
"grad_norm": 0.2888242420563682, |
|
"learning_rate": 0.00019921723142547347, |
|
"loss": 1.3656, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.13705688375927452, |
|
"grad_norm": 0.27299177987397777, |
|
"learning_rate": 0.00019917166146626392, |
|
"loss": 1.0691, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.1380873866446826, |
|
"grad_norm": 0.35236952647741737, |
|
"learning_rate": 0.0001991248078192593, |
|
"loss": 1.2263, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.1391178895300907, |
|
"grad_norm": 0.2681417629469303, |
|
"learning_rate": 0.00019907667109093794, |
|
"loss": 1.354, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.14014839241549876, |
|
"grad_norm": 0.310226199539697, |
|
"learning_rate": 0.00019902725190438627, |
|
"loss": 1.0365, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.14117889530090685, |
|
"grad_norm": 0.3036115548984946, |
|
"learning_rate": 0.00019897655089929126, |
|
"loss": 1.3279, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.14220939818631492, |
|
"grad_norm": 0.24182676057547745, |
|
"learning_rate": 0.00019892456873193165, |
|
"loss": 1.0803, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.143239901071723, |
|
"grad_norm": 0.27410693799977515, |
|
"learning_rate": 0.00019887130607516978, |
|
"loss": 1.2236, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.1442704039571311, |
|
"grad_norm": 0.3158493448762225, |
|
"learning_rate": 0.00019881676361844275, |
|
"loss": 1.3796, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.14530090684253916, |
|
"grad_norm": 0.48562045991681607, |
|
"learning_rate": 0.0001987609420677535, |
|
"loss": 1.0014, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.14633140972794723, |
|
"grad_norm": 0.24730942495184344, |
|
"learning_rate": 0.00019870384214566174, |
|
"loss": 1.3463, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.14736191261335532, |
|
"grad_norm": 0.3153226513268582, |
|
"learning_rate": 0.00019864546459127448, |
|
"loss": 1.0977, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.1483924154987634, |
|
"grad_norm": 0.249510122134111, |
|
"learning_rate": 0.0001985858101602366, |
|
"loss": 1.2687, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.1494229183841715, |
|
"grad_norm": 0.30842159531538627, |
|
"learning_rate": 0.0001985248796247209, |
|
"loss": 1.2815, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.15045342126957956, |
|
"grad_norm": 0.26379223642130784, |
|
"learning_rate": 0.00019846267377341827, |
|
"loss": 0.9403, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.15148392415498763, |
|
"grad_norm": 0.23891641481355658, |
|
"learning_rate": 0.00019839919341152742, |
|
"loss": 1.3505, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.15251442704039572, |
|
"grad_norm": 0.3238473098861904, |
|
"learning_rate": 0.00019833443936074442, |
|
"loss": 1.0409, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.1535449299258038, |
|
"grad_norm": 0.2746647114109431, |
|
"learning_rate": 0.00019826841245925212, |
|
"loss": 1.0984, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.15457543281121186, |
|
"grad_norm": 0.37674302298987083, |
|
"learning_rate": 0.00019820111356170923, |
|
"loss": 1.3147, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.15560593569661996, |
|
"grad_norm": 0.25219340224453657, |
|
"learning_rate": 0.00019813254353923937, |
|
"loss": 1.0348, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.15663643858202803, |
|
"grad_norm": 0.26735510810071406, |
|
"learning_rate": 0.00019806270327941971, |
|
"loss": 1.3468, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.15766694146743612, |
|
"grad_norm": 0.25452976189367954, |
|
"learning_rate": 0.00019799159368626945, |
|
"loss": 1.0699, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.1586974443528442, |
|
"grad_norm": 0.2863967455600455, |
|
"learning_rate": 0.00019791921568023822, |
|
"loss": 1.1045, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.15972794723825226, |
|
"grad_norm": 0.32564915808035255, |
|
"learning_rate": 0.00019784557019819404, |
|
"loss": 1.301, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.16075845012366036, |
|
"grad_norm": 0.2598862519671672, |
|
"learning_rate": 0.00019777065819341137, |
|
"loss": 0.9559, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.16178895300906843, |
|
"grad_norm": 0.2694147600296894, |
|
"learning_rate": 0.00019769448063555856, |
|
"loss": 1.346, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.1628194558944765, |
|
"grad_norm": 0.2464767798391986, |
|
"learning_rate": 0.00019761703851068553, |
|
"loss": 1.102, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.1638499587798846, |
|
"grad_norm": 0.2753953016773011, |
|
"learning_rate": 0.0001975383328212107, |
|
"loss": 1.2188, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.16488046166529266, |
|
"grad_norm": 0.38049304343681456, |
|
"learning_rate": 0.00019745836458590836, |
|
"loss": 1.3454, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.16488046166529266, |
|
"eval_loss": 1.1297026872634888, |
|
"eval_runtime": 2464.1297, |
|
"eval_samples_per_second": 3.247, |
|
"eval_steps_per_second": 0.203, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.16591096455070073, |
|
"grad_norm": 0.25205784909199425, |
|
"learning_rate": 0.0001973771348398953, |
|
"loss": 0.9905, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.16694146743610883, |
|
"grad_norm": 0.2719665322536544, |
|
"learning_rate": 0.0001972946446346173, |
|
"loss": 1.3772, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.1679719703215169, |
|
"grad_norm": 0.23804058554763297, |
|
"learning_rate": 0.00019721089503783577, |
|
"loss": 1.0834, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.169002473206925, |
|
"grad_norm": 0.28963422898407165, |
|
"learning_rate": 0.00019712588713361378, |
|
"loss": 1.2064, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.17003297609233306, |
|
"grad_norm": 0.3442324930908095, |
|
"learning_rate": 0.00019703962202230203, |
|
"loss": 1.2967, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.17106347897774113, |
|
"grad_norm": 0.2547770116672794, |
|
"learning_rate": 0.00019695210082052472, |
|
"loss": 0.949, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.17209398186314923, |
|
"grad_norm": 0.2832243503202021, |
|
"learning_rate": 0.00019686332466116487, |
|
"loss": 1.2852, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.1731244847485573, |
|
"grad_norm": 0.2332582520614932, |
|
"learning_rate": 0.0001967732946933499, |
|
"loss": 0.979, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.17415498763396536, |
|
"grad_norm": 0.29363618940675, |
|
"learning_rate": 0.00019668201208243658, |
|
"loss": 1.223, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.17518549051937346, |
|
"grad_norm": 0.28680318445015207, |
|
"learning_rate": 0.0001965894780099961, |
|
"loss": 1.3119, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.17621599340478153, |
|
"grad_norm": 0.2415531891983295, |
|
"learning_rate": 0.00019649569367379867, |
|
"loss": 0.9811, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.17724649629018963, |
|
"grad_norm": 0.2766629334909396, |
|
"learning_rate": 0.00019640066028779794, |
|
"loss": 1.2464, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.1782769991755977, |
|
"grad_norm": 0.24460494296561755, |
|
"learning_rate": 0.00019630437908211548, |
|
"loss": 1.0896, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.17930750206100576, |
|
"grad_norm": 0.28580998835949145, |
|
"learning_rate": 0.00019620685130302478, |
|
"loss": 1.1636, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.18033800494641386, |
|
"grad_norm": 0.29222255776500833, |
|
"learning_rate": 0.00019610807821293503, |
|
"loss": 1.3137, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.18136850783182193, |
|
"grad_norm": 0.2684207256549492, |
|
"learning_rate": 0.00019600806109037485, |
|
"loss": 0.9964, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.18239901071723, |
|
"grad_norm": 0.2559298498236225, |
|
"learning_rate": 0.00019590680122997582, |
|
"loss": 1.3378, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.1834295136026381, |
|
"grad_norm": 0.2349865929838881, |
|
"learning_rate": 0.00019580429994245555, |
|
"loss": 1.0436, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.18446001648804616, |
|
"grad_norm": 0.29736696922743316, |
|
"learning_rate": 0.0001957005585546009, |
|
"loss": 1.1778, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.18549051937345423, |
|
"grad_norm": 0.34641352119598734, |
|
"learning_rate": 0.00019559557840925055, |
|
"loss": 1.289, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.18652102225886233, |
|
"grad_norm": 0.2589416225643458, |
|
"learning_rate": 0.00019548936086527798, |
|
"loss": 0.9491, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.1875515251442704, |
|
"grad_norm": 0.2778855204059982, |
|
"learning_rate": 0.00019538190729757356, |
|
"loss": 1.3701, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.1885820280296785, |
|
"grad_norm": 0.2433946170800378, |
|
"learning_rate": 0.00019527321909702688, |
|
"loss": 1.0454, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.18961253091508656, |
|
"grad_norm": 0.28491470830869087, |
|
"learning_rate": 0.00019516329767050878, |
|
"loss": 1.2001, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.19064303380049463, |
|
"grad_norm": 0.30637698470530034, |
|
"learning_rate": 0.00019505214444085308, |
|
"loss": 1.3096, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.19167353668590273, |
|
"grad_norm": 0.24736218088176834, |
|
"learning_rate": 0.00019493976084683813, |
|
"loss": 0.9663, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.1927040395713108, |
|
"grad_norm": 0.24983643400010644, |
|
"learning_rate": 0.00019482614834316836, |
|
"loss": 1.3698, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.19373454245671887, |
|
"grad_norm": 0.2420833122771942, |
|
"learning_rate": 0.00019471130840045518, |
|
"loss": 1.0335, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.19476504534212696, |
|
"grad_norm": 0.2573293132603339, |
|
"learning_rate": 0.00019459524250519826, |
|
"loss": 1.1905, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.19579554822753503, |
|
"grad_norm": 0.3710740261808263, |
|
"learning_rate": 0.00019447795215976594, |
|
"loss": 1.3102, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.19682605111294313, |
|
"grad_norm": 0.2569919144815, |
|
"learning_rate": 0.0001943594388823761, |
|
"loss": 0.8868, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.1978565539983512, |
|
"grad_norm": 0.3020196940157676, |
|
"learning_rate": 0.00019423970420707627, |
|
"loss": 1.3362, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.19888705688375927, |
|
"grad_norm": 0.23638629002825132, |
|
"learning_rate": 0.00019411874968372402, |
|
"loss": 1.0545, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.19991755976916736, |
|
"grad_norm": 0.2690903988670187, |
|
"learning_rate": 0.00019399657687796658, |
|
"loss": 1.1581, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.20094806265457543, |
|
"grad_norm": 0.3219945588976219, |
|
"learning_rate": 0.00019387318737122092, |
|
"loss": 1.2816, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.2019785655399835, |
|
"grad_norm": 0.2510582302230213, |
|
"learning_rate": 0.0001937485827606529, |
|
"loss": 0.9363, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.2030090684253916, |
|
"grad_norm": 0.3075547394305651, |
|
"learning_rate": 0.00019362276465915702, |
|
"loss": 1.2897, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.20403957131079967, |
|
"grad_norm": 0.24639351565359302, |
|
"learning_rate": 0.0001934957346953352, |
|
"loss": 1.0314, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.20507007419620774, |
|
"grad_norm": 0.31059587986354126, |
|
"learning_rate": 0.00019336749451347586, |
|
"loss": 1.2041, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.20610057708161583, |
|
"grad_norm": 0.2842021215906897, |
|
"learning_rate": 0.0001932380457735326, |
|
"loss": 1.2731, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.20610057708161583, |
|
"eval_loss": 1.1071134805679321, |
|
"eval_runtime": 2476.6697, |
|
"eval_samples_per_second": 3.23, |
|
"eval_steps_per_second": 0.202, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.2071310799670239, |
|
"grad_norm": 0.25497821921270564, |
|
"learning_rate": 0.00019310739015110267, |
|
"loss": 0.9285, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.208161582852432, |
|
"grad_norm": 0.2793650388701531, |
|
"learning_rate": 0.00019297552933740547, |
|
"loss": 1.3337, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.20919208573784007, |
|
"grad_norm": 0.2516697285323032, |
|
"learning_rate": 0.0001928424650392603, |
|
"loss": 1.0655, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.21022258862324814, |
|
"grad_norm": 0.30212273915620885, |
|
"learning_rate": 0.00019270819897906468, |
|
"loss": 1.2153, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.21125309150865623, |
|
"grad_norm": 0.33419010602150057, |
|
"learning_rate": 0.00019257273289477174, |
|
"loss": 1.3252, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.2122835943940643, |
|
"grad_norm": 0.27421309631436125, |
|
"learning_rate": 0.00019243606853986786, |
|
"loss": 1.0082, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.21331409727947237, |
|
"grad_norm": 0.34717245381281975, |
|
"learning_rate": 0.00019229820768335, |
|
"loss": 1.3901, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.21434460016488047, |
|
"grad_norm": 0.23427202649137227, |
|
"learning_rate": 0.00019215915210970267, |
|
"loss": 0.9838, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.21537510305028854, |
|
"grad_norm": 0.2923332885069089, |
|
"learning_rate": 0.00019201890361887506, |
|
"loss": 1.1746, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.21640560593569663, |
|
"grad_norm": 0.34553332711604556, |
|
"learning_rate": 0.0001918774640262574, |
|
"loss": 1.2803, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.2174361088211047, |
|
"grad_norm": 0.282937796924011, |
|
"learning_rate": 0.00019173483516265788, |
|
"loss": 1.0297, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.21846661170651277, |
|
"grad_norm": 0.3064500972204145, |
|
"learning_rate": 0.00019159101887427854, |
|
"loss": 1.3082, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.21949711459192087, |
|
"grad_norm": 0.24140887151281765, |
|
"learning_rate": 0.00019144601702269162, |
|
"loss": 1.0166, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.22052761747732894, |
|
"grad_norm": 0.2751668782987028, |
|
"learning_rate": 0.00019129983148481552, |
|
"loss": 1.149, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.221558120362737, |
|
"grad_norm": 0.32570210618301154, |
|
"learning_rate": 0.0001911524641528902, |
|
"loss": 1.287, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.2225886232481451, |
|
"grad_norm": 0.24698689309459554, |
|
"learning_rate": 0.00019100391693445306, |
|
"loss": 0.9291, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.22361912613355317, |
|
"grad_norm": 0.26397978494526464, |
|
"learning_rate": 0.00019085419175231394, |
|
"loss": 1.2573, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.22464962901896127, |
|
"grad_norm": 0.23809647134840975, |
|
"learning_rate": 0.00019070329054453046, |
|
"loss": 1.0226, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.22568013190436934, |
|
"grad_norm": 0.31058086961600134, |
|
"learning_rate": 0.00019055121526438272, |
|
"loss": 1.1561, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.2267106347897774, |
|
"grad_norm": 0.2778718312695914, |
|
"learning_rate": 0.00019039796788034822, |
|
"loss": 1.3034, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.2277411376751855, |
|
"grad_norm": 0.2417800787196308, |
|
"learning_rate": 0.00019024355037607622, |
|
"loss": 0.8994, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.22877164056059357, |
|
"grad_norm": 0.28904453048763723, |
|
"learning_rate": 0.00019008796475036222, |
|
"loss": 1.2439, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.22980214344600164, |
|
"grad_norm": 0.23624741886843423, |
|
"learning_rate": 0.00018993121301712193, |
|
"loss": 1.0735, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.23083264633140974, |
|
"grad_norm": 0.27985038336320134, |
|
"learning_rate": 0.00018977329720536529, |
|
"loss": 1.1098, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.2318631492168178, |
|
"grad_norm": 0.2891758603238283, |
|
"learning_rate": 0.00018961421935917016, |
|
"loss": 1.2739, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.23289365210222587, |
|
"grad_norm": 0.2635168830732569, |
|
"learning_rate": 0.00018945398153765597, |
|
"loss": 1.0068, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.23392415498763397, |
|
"grad_norm": 0.2773479010066933, |
|
"learning_rate": 0.00018929258581495685, |
|
"loss": 1.3011, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.23495465787304204, |
|
"grad_norm": 0.2560481495589164, |
|
"learning_rate": 0.00018913003428019506, |
|
"loss": 1.0617, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.23598516075845014, |
|
"grad_norm": 0.277847323563774, |
|
"learning_rate": 0.00018896632903745374, |
|
"loss": 1.0843, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.2370156636438582, |
|
"grad_norm": 0.3618488575426716, |
|
"learning_rate": 0.00018880147220574976, |
|
"loss": 1.2909, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.23804616652926627, |
|
"grad_norm": 0.2840442439659897, |
|
"learning_rate": 0.00018863546591900622, |
|
"loss": 0.9916, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.23907666941467437, |
|
"grad_norm": 0.2904562870162801, |
|
"learning_rate": 0.00018846831232602492, |
|
"loss": 1.3014, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.24010717230008244, |
|
"grad_norm": 0.24724715741223574, |
|
"learning_rate": 0.00018830001359045845, |
|
"loss": 0.9967, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.2411376751854905, |
|
"grad_norm": 0.26593630753000524, |
|
"learning_rate": 0.00018813057189078243, |
|
"loss": 1.0626, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.2421681780708986, |
|
"grad_norm": 0.3276447476001242, |
|
"learning_rate": 0.00018795998942026685, |
|
"loss": 1.2979, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.24319868095630667, |
|
"grad_norm": 0.26299318912598185, |
|
"learning_rate": 0.00018778826838694812, |
|
"loss": 0.9544, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.24422918384171477, |
|
"grad_norm": 0.281736858661113, |
|
"learning_rate": 0.0001876154110136003, |
|
"loss": 1.3208, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.24525968672712284, |
|
"grad_norm": 0.2855862979125554, |
|
"learning_rate": 0.0001874414195377063, |
|
"loss": 1.073, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.2462901896125309, |
|
"grad_norm": 0.29014069310260776, |
|
"learning_rate": 0.000187266296211429, |
|
"loss": 1.1681, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.247320692497939, |
|
"grad_norm": 0.3159103313554715, |
|
"learning_rate": 0.0001870900433015821, |
|
"loss": 1.3863, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.247320692497939, |
|
"eval_loss": 1.0878444910049438, |
|
"eval_runtime": 2463.5391, |
|
"eval_samples_per_second": 3.247, |
|
"eval_steps_per_second": 0.203, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.24835119538334707, |
|
"grad_norm": 0.2980434519288696, |
|
"learning_rate": 0.00018691266308960066, |
|
"loss": 1.0099, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.24938169826875514, |
|
"grad_norm": 0.2786508426961495, |
|
"learning_rate": 0.00018673415787151166, |
|
"loss": 1.3381, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.25041220115416324, |
|
"grad_norm": 0.2676203832313212, |
|
"learning_rate": 0.00018655452995790435, |
|
"loss": 1.0199, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.2514427040395713, |
|
"grad_norm": 0.28577827654986465, |
|
"learning_rate": 0.00018637378167390018, |
|
"loss": 1.2001, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.2524732069249794, |
|
"grad_norm": 0.3398905155731309, |
|
"learning_rate": 0.0001861919153591228, |
|
"loss": 1.2629, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.25350370981038745, |
|
"grad_norm": 0.26851195221770446, |
|
"learning_rate": 0.00018600893336766786, |
|
"loss": 0.9409, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.25453421269579557, |
|
"grad_norm": 0.26580580512721763, |
|
"learning_rate": 0.00018582483806807228, |
|
"loss": 1.3071, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.25556471558120364, |
|
"grad_norm": 0.2571726332870005, |
|
"learning_rate": 0.0001856396318432838, |
|
"loss": 1.0674, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.2565952184666117, |
|
"grad_norm": 0.28205467317170896, |
|
"learning_rate": 0.0001854533170906302, |
|
"loss": 1.1248, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.2576257213520198, |
|
"grad_norm": 0.3312983996287865, |
|
"learning_rate": 0.00018526589622178802, |
|
"loss": 1.2796, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.25865622423742785, |
|
"grad_norm": 0.2757071364626265, |
|
"learning_rate": 0.00018507737166275154, |
|
"loss": 0.9658, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.25968672712283597, |
|
"grad_norm": 0.3459316658489597, |
|
"learning_rate": 0.00018488774585380125, |
|
"loss": 1.3143, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.26071723000824404, |
|
"grad_norm": 0.2521379464850173, |
|
"learning_rate": 0.00018469702124947245, |
|
"loss": 0.9855, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.2617477328936521, |
|
"grad_norm": 0.28337200244179356, |
|
"learning_rate": 0.00018450520031852325, |
|
"loss": 1.2029, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.2627782357790602, |
|
"grad_norm": 0.3562233737662029, |
|
"learning_rate": 0.0001843122855439027, |
|
"loss": 1.2867, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.26380873866446825, |
|
"grad_norm": 0.2883121950988055, |
|
"learning_rate": 0.00018411827942271884, |
|
"loss": 0.978, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.2648392415498763, |
|
"grad_norm": 0.3157197550829857, |
|
"learning_rate": 0.000183923184466206, |
|
"loss": 1.2767, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.26586974443528444, |
|
"grad_norm": 0.24013448810718416, |
|
"learning_rate": 0.0001837270031996926, |
|
"loss": 1.0749, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.2669002473206925, |
|
"grad_norm": 0.32239639052379476, |
|
"learning_rate": 0.00018352973816256838, |
|
"loss": 1.1699, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.2679307502061006, |
|
"grad_norm": 0.29188168243683993, |
|
"learning_rate": 0.0001833313919082515, |
|
"loss": 1.3297, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.26896125309150865, |
|
"grad_norm": 0.2710883490462726, |
|
"learning_rate": 0.0001831319670041555, |
|
"loss": 0.9246, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.2699917559769167, |
|
"grad_norm": 0.2949025846368701, |
|
"learning_rate": 0.00018293146603165603, |
|
"loss": 1.339, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.27102225886232484, |
|
"grad_norm": 0.24960673732959246, |
|
"learning_rate": 0.00018272989158605752, |
|
"loss": 1.0767, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.2720527617477329, |
|
"grad_norm": 0.31392425706558585, |
|
"learning_rate": 0.00018252724627655954, |
|
"loss": 1.1564, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.273083264633141, |
|
"grad_norm": 0.3755895664237009, |
|
"learning_rate": 0.00018232353272622302, |
|
"loss": 1.2677, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.27411376751854905, |
|
"grad_norm": 0.2711084044553726, |
|
"learning_rate": 0.00018211875357193632, |
|
"loss": 0.9061, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.2751442704039571, |
|
"grad_norm": 0.33175445691564986, |
|
"learning_rate": 0.00018191291146438105, |
|
"loss": 1.273, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.2761747732893652, |
|
"grad_norm": 0.2537090735923007, |
|
"learning_rate": 0.0001817060090679978, |
|
"loss": 1.0512, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.2772052761747733, |
|
"grad_norm": 0.36552128539895057, |
|
"learning_rate": 0.00018149804906095163, |
|
"loss": 1.2273, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.2782357790601814, |
|
"grad_norm": 0.3485807735804365, |
|
"learning_rate": 0.00018128903413509756, |
|
"loss": 1.2532, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.27926628194558945, |
|
"grad_norm": 0.2737156223424568, |
|
"learning_rate": 0.0001810789669959453, |
|
"loss": 1.0208, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.2802967848309975, |
|
"grad_norm": 0.3091305751030036, |
|
"learning_rate": 0.0001808678503626248, |
|
"loss": 1.3024, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.2813272877164056, |
|
"grad_norm": 0.2448701581737246, |
|
"learning_rate": 0.00018065568696785058, |
|
"loss": 1.0065, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.2823577906018137, |
|
"grad_norm": 0.33520109656163827, |
|
"learning_rate": 0.00018044247955788662, |
|
"loss": 1.1409, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.2833882934872218, |
|
"grad_norm": 0.36094563501069604, |
|
"learning_rate": 0.00018022823089251073, |
|
"loss": 1.2687, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.28441879637262985, |
|
"grad_norm": 0.29785608686411563, |
|
"learning_rate": 0.00018001294374497882, |
|
"loss": 0.9357, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.2854492992580379, |
|
"grad_norm": 0.3875323214985989, |
|
"learning_rate": 0.00017979662090198906, |
|
"loss": 1.2922, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.286479802143446, |
|
"grad_norm": 0.3058769686259136, |
|
"learning_rate": 0.00017957926516364565, |
|
"loss": 1.0047, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.2875103050288541, |
|
"grad_norm": 0.3030215125843954, |
|
"learning_rate": 0.00017936087934342283, |
|
"loss": 1.0829, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.2885408079142622, |
|
"grad_norm": 0.32726489328282893, |
|
"learning_rate": 0.00017914146626812823, |
|
"loss": 1.2567, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.2885408079142622, |
|
"eval_loss": 1.077731728553772, |
|
"eval_runtime": 2463.5195, |
|
"eval_samples_per_second": 3.247, |
|
"eval_steps_per_second": 0.203, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.28957131079967025, |
|
"grad_norm": 0.2992981015754632, |
|
"learning_rate": 0.0001789210287778664, |
|
"loss": 0.9792, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.2906018136850783, |
|
"grad_norm": 0.3418337934197468, |
|
"learning_rate": 0.00017869956972600202, |
|
"loss": 1.3454, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.2916323165704864, |
|
"grad_norm": 0.2526294763625045, |
|
"learning_rate": 0.00017847709197912296, |
|
"loss": 0.9715, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.29266281945589445, |
|
"grad_norm": 0.3445050915146703, |
|
"learning_rate": 0.0001782535984170032, |
|
"loss": 1.113, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.2936933223413026, |
|
"grad_norm": 0.32389102430563055, |
|
"learning_rate": 0.00017802909193256547, |
|
"loss": 1.2316, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.29472382522671065, |
|
"grad_norm": 0.2613877323741928, |
|
"learning_rate": 0.00017780357543184397, |
|
"loss": 0.9269, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.2957543281121187, |
|
"grad_norm": 0.2752040315178045, |
|
"learning_rate": 0.00017757705183394653, |
|
"loss": 1.2481, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.2967848309975268, |
|
"grad_norm": 0.2535871144663709, |
|
"learning_rate": 0.00017734952407101706, |
|
"loss": 1.0301, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.29781533388293485, |
|
"grad_norm": 0.28927983501411936, |
|
"learning_rate": 0.0001771209950881974, |
|
"loss": 1.1218, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.298845836768343, |
|
"grad_norm": 0.37005769485403384, |
|
"learning_rate": 0.00017689146784358927, |
|
"loss": 1.2699, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.29987633965375105, |
|
"grad_norm": 0.257690573166382, |
|
"learning_rate": 0.0001766609453082161, |
|
"loss": 0.9097, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.3009068425391591, |
|
"grad_norm": 0.2708622421638633, |
|
"learning_rate": 0.00017642943046598436, |
|
"loss": 1.2685, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.3019373454245672, |
|
"grad_norm": 0.2553661493662262, |
|
"learning_rate": 0.00017619692631364506, |
|
"loss": 1.062, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.30296784830997525, |
|
"grad_norm": 0.3268803054784382, |
|
"learning_rate": 0.00017596343586075497, |
|
"loss": 1.1081, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.3039983511953833, |
|
"grad_norm": 0.36412223215382217, |
|
"learning_rate": 0.00017572896212963754, |
|
"loss": 1.3221, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.30502885408079145, |
|
"grad_norm": 0.2624897168675397, |
|
"learning_rate": 0.000175493508155344, |
|
"loss": 0.854, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.3060593569661995, |
|
"grad_norm": 0.2910755462081272, |
|
"learning_rate": 0.00017525707698561385, |
|
"loss": 1.2961, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.3070898598516076, |
|
"grad_norm": 0.26020570413888594, |
|
"learning_rate": 0.00017501967168083557, |
|
"loss": 1.0475, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.30812036273701565, |
|
"grad_norm": 0.31355952905674644, |
|
"learning_rate": 0.00017478129531400688, |
|
"loss": 1.1383, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.3091508656224237, |
|
"grad_norm": 0.3394529288865745, |
|
"learning_rate": 0.00017454195097069505, |
|
"loss": 1.3237, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.31018136850783184, |
|
"grad_norm": 0.2623708218727371, |
|
"learning_rate": 0.00017430164174899696, |
|
"loss": 0.8949, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.3112118713932399, |
|
"grad_norm": 0.3129719424005796, |
|
"learning_rate": 0.0001740603707594989, |
|
"loss": 1.2513, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.312242374278648, |
|
"grad_norm": 0.2457430728630137, |
|
"learning_rate": 0.00017381814112523648, |
|
"loss": 1.0458, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.31327287716405605, |
|
"grad_norm": 0.3235157301586437, |
|
"learning_rate": 0.000173574955981654, |
|
"loss": 1.1418, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.3143033800494641, |
|
"grad_norm": 0.38068266969796094, |
|
"learning_rate": 0.00017333081847656397, |
|
"loss": 1.2703, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.31533388293487224, |
|
"grad_norm": 0.2845050833721142, |
|
"learning_rate": 0.00017308573177010652, |
|
"loss": 0.8513, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.3163643858202803, |
|
"grad_norm": 0.3011956997232069, |
|
"learning_rate": 0.00017283969903470815, |
|
"loss": 1.2671, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.3173948887056884, |
|
"grad_norm": 0.2548678800376021, |
|
"learning_rate": 0.0001725927234550409, |
|
"loss": 0.9834, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.31842539159109645, |
|
"grad_norm": 0.33070817321371104, |
|
"learning_rate": 0.00017234480822798113, |
|
"loss": 1.204, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.3194558944765045, |
|
"grad_norm": 0.3739990601669961, |
|
"learning_rate": 0.00017209595656256807, |
|
"loss": 1.2874, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.3204863973619126, |
|
"grad_norm": 0.28630568916559335, |
|
"learning_rate": 0.0001718461716799623, |
|
"loss": 0.9404, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.3215169002473207, |
|
"grad_norm": 0.2984332733087229, |
|
"learning_rate": 0.000171595456813404, |
|
"loss": 1.3065, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.3225474031327288, |
|
"grad_norm": 0.25297494310840185, |
|
"learning_rate": 0.00017134381520817127, |
|
"loss": 0.9533, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.32357790601813685, |
|
"grad_norm": 0.3035622251707662, |
|
"learning_rate": 0.00017109125012153783, |
|
"loss": 1.1281, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.3246084089035449, |
|
"grad_norm": 0.3401968875424911, |
|
"learning_rate": 0.00017083776482273126, |
|
"loss": 1.2439, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.325638911788953, |
|
"grad_norm": 0.2742633328984466, |
|
"learning_rate": 0.00017058336259289026, |
|
"loss": 0.9067, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.3266694146743611, |
|
"grad_norm": 0.33201726031181766, |
|
"learning_rate": 0.0001703280467250225, |
|
"loss": 1.2643, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.3276999175597692, |
|
"grad_norm": 0.2740668506157443, |
|
"learning_rate": 0.0001700718205239618, |
|
"loss": 0.9757, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.32873042044517725, |
|
"grad_norm": 0.28447062660977135, |
|
"learning_rate": 0.0001698146873063255, |
|
"loss": 1.0629, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.3297609233305853, |
|
"grad_norm": 0.3855059276442332, |
|
"learning_rate": 0.00016955665040047134, |
|
"loss": 1.257, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.3297609233305853, |
|
"eval_loss": 1.0629926919937134, |
|
"eval_runtime": 2467.5248, |
|
"eval_samples_per_second": 3.242, |
|
"eval_steps_per_second": 0.203, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.3307914262159934, |
|
"grad_norm": 0.30766662381135884, |
|
"learning_rate": 0.00016929771314645454, |
|
"loss": 0.9716, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.33182192910140146, |
|
"grad_norm": 0.3070232969296392, |
|
"learning_rate": 0.00016903787889598458, |
|
"loss": 1.3027, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.3328524319868096, |
|
"grad_norm": 0.29718017467704144, |
|
"learning_rate": 0.00016877715101238172, |
|
"loss": 1.053, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.33388293487221765, |
|
"grad_norm": 0.29124927902233755, |
|
"learning_rate": 0.00016851553287053342, |
|
"loss": 1.1027, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.3349134377576257, |
|
"grad_norm": 0.37490768596522844, |
|
"learning_rate": 0.00016825302785685077, |
|
"loss": 1.2816, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.3359439406430338, |
|
"grad_norm": 0.2909222245308242, |
|
"learning_rate": 0.00016798963936922467, |
|
"loss": 0.9477, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.33697444352844186, |
|
"grad_norm": 0.369759262778725, |
|
"learning_rate": 0.00016772537081698175, |
|
"loss": 1.2659, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.33800494641385, |
|
"grad_norm": 0.25527189055893823, |
|
"learning_rate": 0.00016746022562084026, |
|
"loss": 1.0374, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.33903544929925805, |
|
"grad_norm": 0.332617684071012, |
|
"learning_rate": 0.0001671942072128659, |
|
"loss": 1.1066, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.3400659521846661, |
|
"grad_norm": 0.3184886656626706, |
|
"learning_rate": 0.00016692731903642725, |
|
"loss": 1.2649, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.3410964550700742, |
|
"grad_norm": 0.2799293838329678, |
|
"learning_rate": 0.0001666595645461512, |
|
"loss": 0.9379, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.34212695795548226, |
|
"grad_norm": 0.3517762307499943, |
|
"learning_rate": 0.0001663909472078784, |
|
"loss": 1.252, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.3431574608408903, |
|
"grad_norm": 0.25384775837778967, |
|
"learning_rate": 0.0001661214704986182, |
|
"loss": 0.9847, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.34418796372629845, |
|
"grad_norm": 0.29743299437921455, |
|
"learning_rate": 0.00016585113790650388, |
|
"loss": 1.1201, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.3452184666117065, |
|
"grad_norm": 0.35589431095248486, |
|
"learning_rate": 0.00016557995293074715, |
|
"loss": 1.2206, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.3462489694971146, |
|
"grad_norm": 0.2911495340019745, |
|
"learning_rate": 0.00016530791908159323, |
|
"loss": 0.9072, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.34727947238252266, |
|
"grad_norm": 0.28789636914166633, |
|
"learning_rate": 0.0001650350398802751, |
|
"loss": 1.2946, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.3483099752679307, |
|
"grad_norm": 0.26335155835600166, |
|
"learning_rate": 0.0001647613188589682, |
|
"loss": 1.0242, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.34934047815333885, |
|
"grad_norm": 0.3325623900144522, |
|
"learning_rate": 0.00016448675956074444, |
|
"loss": 1.0931, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.3503709810387469, |
|
"grad_norm": 0.3773837775653675, |
|
"learning_rate": 0.0001642113655395266, |
|
"loss": 1.2456, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.351401483924155, |
|
"grad_norm": 0.285753347787117, |
|
"learning_rate": 0.00016393514036004204, |
|
"loss": 0.9662, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.35243198680956306, |
|
"grad_norm": 0.3046613746172406, |
|
"learning_rate": 0.0001636580875977769, |
|
"loss": 1.3162, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.3534624896949711, |
|
"grad_norm": 0.25099480129499735, |
|
"learning_rate": 0.0001633802108389295, |
|
"loss": 1.0061, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.35449299258037925, |
|
"grad_norm": 0.34150662422413947, |
|
"learning_rate": 0.00016310151368036408, |
|
"loss": 1.1411, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.3555234954657873, |
|
"grad_norm": 0.33392898782461256, |
|
"learning_rate": 0.00016282199972956425, |
|
"loss": 1.2154, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.3565539983511954, |
|
"grad_norm": 0.3016273908980527, |
|
"learning_rate": 0.00016254167260458622, |
|
"loss": 0.9269, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.35758450123660346, |
|
"grad_norm": 0.3297238572031139, |
|
"learning_rate": 0.000162260535934012, |
|
"loss": 1.2298, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.3586150041220115, |
|
"grad_norm": 0.26311212102325543, |
|
"learning_rate": 0.00016197859335690247, |
|
"loss": 1.0416, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.3596455070074196, |
|
"grad_norm": 0.35401650365663206, |
|
"learning_rate": 0.0001616958485227503, |
|
"loss": 1.0915, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.3606760098928277, |
|
"grad_norm": 0.3946861144577044, |
|
"learning_rate": 0.0001614123050914325, |
|
"loss": 1.2156, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.3617065127782358, |
|
"grad_norm": 0.31649222443853475, |
|
"learning_rate": 0.00016112796673316336, |
|
"loss": 0.9126, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.36273701566364386, |
|
"grad_norm": 0.3591291538700441, |
|
"learning_rate": 0.00016084283712844666, |
|
"loss": 1.2438, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.3637675185490519, |
|
"grad_norm": 0.2567906575863548, |
|
"learning_rate": 0.00016055691996802823, |
|
"loss": 0.9908, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.36479802143446, |
|
"grad_norm": 0.34039693260231785, |
|
"learning_rate": 0.00016027021895284808, |
|
"loss": 1.0576, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.3658285243198681, |
|
"grad_norm": 0.45929799156275763, |
|
"learning_rate": 0.0001599827377939925, |
|
"loss": 1.3099, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.3668590272052762, |
|
"grad_norm": 0.2736394916456097, |
|
"learning_rate": 0.00015969448021264606, |
|
"loss": 0.8622, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.36788953009068426, |
|
"grad_norm": 0.2884997182361836, |
|
"learning_rate": 0.00015940544994004334, |
|
"loss": 1.2272, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.3689200329760923, |
|
"grad_norm": 0.2392312698637367, |
|
"learning_rate": 0.00015911565071742088, |
|
"loss": 0.9626, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.3699505358615004, |
|
"grad_norm": 0.339213144942226, |
|
"learning_rate": 0.00015882508629596836, |
|
"loss": 1.1233, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.37098103874690846, |
|
"grad_norm": 0.40881279626532063, |
|
"learning_rate": 0.00015853376043678053, |
|
"loss": 1.2129, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.37098103874690846, |
|
"eval_loss": 1.051790475845337, |
|
"eval_runtime": 2468.3063, |
|
"eval_samples_per_second": 3.241, |
|
"eval_steps_per_second": 0.203, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.3720115416323166, |
|
"grad_norm": 0.27225360643392454, |
|
"learning_rate": 0.00015824167691080802, |
|
"loss": 0.8599, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.37304204451772466, |
|
"grad_norm": 0.3655053849913508, |
|
"learning_rate": 0.00015794883949880894, |
|
"loss": 1.2174, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.3740725474031327, |
|
"grad_norm": 0.2810810355550597, |
|
"learning_rate": 0.00015765525199129966, |
|
"loss": 1.0166, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.3751030502885408, |
|
"grad_norm": 0.3332282628999955, |
|
"learning_rate": 0.000157360918188506, |
|
"loss": 1.1182, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.37613355317394886, |
|
"grad_norm": 0.3175764657865911, |
|
"learning_rate": 0.0001570658419003137, |
|
"loss": 1.2687, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.377164056059357, |
|
"grad_norm": 0.3120363543994925, |
|
"learning_rate": 0.00015677002694621948, |
|
"loss": 0.9812, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.37819455894476506, |
|
"grad_norm": 0.327602667442098, |
|
"learning_rate": 0.00015647347715528137, |
|
"loss": 1.1919, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.3792250618301731, |
|
"grad_norm": 0.2668526402245522, |
|
"learning_rate": 0.00015617619636606924, |
|
"loss": 0.9995, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.3802555647155812, |
|
"grad_norm": 0.31419704503426554, |
|
"learning_rate": 0.00015587818842661494, |
|
"loss": 1.1765, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.38128606760098926, |
|
"grad_norm": 0.4282942251054684, |
|
"learning_rate": 0.00015557945719436278, |
|
"loss": 1.2522, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.3823165704863974, |
|
"grad_norm": 0.308069434736153, |
|
"learning_rate": 0.00015528000653611935, |
|
"loss": 0.8995, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.38334707337180546, |
|
"grad_norm": 0.272826512737121, |
|
"learning_rate": 0.0001549798403280036, |
|
"loss": 1.2827, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.3843775762572135, |
|
"grad_norm": 0.2498018349436216, |
|
"learning_rate": 0.0001546789624553966, |
|
"loss": 1.0308, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.3854080791426216, |
|
"grad_norm": 0.32988195624784494, |
|
"learning_rate": 0.00015437737681289128, |
|
"loss": 1.1726, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.38643858202802966, |
|
"grad_norm": 0.38213734379034847, |
|
"learning_rate": 0.00015407508730424206, |
|
"loss": 1.2046, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.38746908491343773, |
|
"grad_norm": 0.2839035313263252, |
|
"learning_rate": 0.00015377209784231424, |
|
"loss": 0.9713, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.38849958779884586, |
|
"grad_norm": 0.26153622554750505, |
|
"learning_rate": 0.00015346841234903337, |
|
"loss": 1.2771, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.3895300906842539, |
|
"grad_norm": 0.25504870927422546, |
|
"learning_rate": 0.0001531640347553345, |
|
"loss": 1.0141, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.390560593569662, |
|
"grad_norm": 0.33076548230805997, |
|
"learning_rate": 0.00015285896900111133, |
|
"loss": 1.1153, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.39159109645507006, |
|
"grad_norm": 0.3682940168037353, |
|
"learning_rate": 0.00015255321903516516, |
|
"loss": 1.2387, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.39262159934047813, |
|
"grad_norm": 0.3030559581752516, |
|
"learning_rate": 0.00015224678881515377, |
|
"loss": 0.8932, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.39365210222588626, |
|
"grad_norm": 0.3016417378121846, |
|
"learning_rate": 0.00015193968230754024, |
|
"loss": 1.2566, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.3946826051112943, |
|
"grad_norm": 0.2562267990440701, |
|
"learning_rate": 0.00015163190348754162, |
|
"loss": 1.0584, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.3957131079967024, |
|
"grad_norm": 0.3152397653362513, |
|
"learning_rate": 0.00015132345633907734, |
|
"loss": 1.1119, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.39674361088211046, |
|
"grad_norm": 0.3550077716096075, |
|
"learning_rate": 0.0001510143448547178, |
|
"loss": 1.2196, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.39777411376751853, |
|
"grad_norm": 0.2713673784146358, |
|
"learning_rate": 0.00015070457303563268, |
|
"loss": 0.8809, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.3988046166529266, |
|
"grad_norm": 0.32482913652095136, |
|
"learning_rate": 0.000150394144891539, |
|
"loss": 1.2114, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.3998351195383347, |
|
"grad_norm": 0.2681392320249827, |
|
"learning_rate": 0.00015008306444064942, |
|
"loss": 1.0092, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.4008656224237428, |
|
"grad_norm": 0.30813511160423507, |
|
"learning_rate": 0.00014977133570961997, |
|
"loss": 1.0587, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.40189612530915086, |
|
"grad_norm": 0.3045685029840853, |
|
"learning_rate": 0.00014945896273349827, |
|
"loss": 1.3074, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.40292662819455893, |
|
"grad_norm": 0.3178744984824384, |
|
"learning_rate": 0.00014914594955567099, |
|
"loss": 0.9477, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.403957131079967, |
|
"grad_norm": 0.36931867482484526, |
|
"learning_rate": 0.00014883230022781163, |
|
"loss": 1.2359, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.4049876339653751, |
|
"grad_norm": 0.27801383031198734, |
|
"learning_rate": 0.00014851801880982814, |
|
"loss": 1.0366, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.4060181368507832, |
|
"grad_norm": 0.31213479911432085, |
|
"learning_rate": 0.00014820310936981026, |
|
"loss": 1.1624, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.40704863973619126, |
|
"grad_norm": 0.33227538300256093, |
|
"learning_rate": 0.000147887575983977, |
|
"loss": 1.2724, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.40807914262159933, |
|
"grad_norm": 0.298687515913839, |
|
"learning_rate": 0.00014757142273662358, |
|
"loss": 0.8436, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.4091096455070074, |
|
"grad_norm": 0.36351887792750426, |
|
"learning_rate": 0.00014725465372006905, |
|
"loss": 1.2507, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.41014014839241547, |
|
"grad_norm": 0.2702448610300903, |
|
"learning_rate": 0.0001469372730346028, |
|
"loss": 1.0191, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.4111706512778236, |
|
"grad_norm": 0.3224079902083336, |
|
"learning_rate": 0.00014661928478843186, |
|
"loss": 1.1598, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.41220115416323166, |
|
"grad_norm": 0.41798586070796323, |
|
"learning_rate": 0.00014630069309762753, |
|
"loss": 1.1939, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.41220115416323166, |
|
"eval_loss": 1.0405410528182983, |
|
"eval_runtime": 2474.052, |
|
"eval_samples_per_second": 3.234, |
|
"eval_steps_per_second": 0.202, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.41323165704863973, |
|
"grad_norm": 0.28365317528526945, |
|
"learning_rate": 0.00014598150208607212, |
|
"loss": 0.8639, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.4142621599340478, |
|
"grad_norm": 0.3214047753618349, |
|
"learning_rate": 0.00014566171588540572, |
|
"loss": 1.2183, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.41529266281945587, |
|
"grad_norm": 0.2644278825870432, |
|
"learning_rate": 0.00014534133863497252, |
|
"loss": 0.9564, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.416323165704864, |
|
"grad_norm": 0.32226077969266037, |
|
"learning_rate": 0.00014502037448176734, |
|
"loss": 1.0942, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.41735366859027206, |
|
"grad_norm": 0.4101159870787422, |
|
"learning_rate": 0.00014469882758038193, |
|
"loss": 1.244, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.41838417147568013, |
|
"grad_norm": 0.28153576445753414, |
|
"learning_rate": 0.00014437670209295112, |
|
"loss": 0.8947, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.4194146743610882, |
|
"grad_norm": 0.3785396934306716, |
|
"learning_rate": 0.0001440540021890992, |
|
"loss": 1.2432, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.42044517724649627, |
|
"grad_norm": 0.2542390029018319, |
|
"learning_rate": 0.00014373073204588556, |
|
"loss": 1.0083, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.4214756801319044, |
|
"grad_norm": 0.3123729353287892, |
|
"learning_rate": 0.0001434068958477509, |
|
"loss": 1.1195, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.42250618301731246, |
|
"grad_norm": 0.39530507310232316, |
|
"learning_rate": 0.00014308249778646306, |
|
"loss": 1.2036, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.42353668590272053, |
|
"grad_norm": 0.2818105109815067, |
|
"learning_rate": 0.0001427575420610626, |
|
"loss": 0.8847, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.4245671887881286, |
|
"grad_norm": 0.3219044657747011, |
|
"learning_rate": 0.00014243203287780856, |
|
"loss": 1.2623, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.42559769167353667, |
|
"grad_norm": 0.2611111361591686, |
|
"learning_rate": 0.00014210597445012398, |
|
"loss": 1.0083, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.42662819455894474, |
|
"grad_norm": 0.33485314347370027, |
|
"learning_rate": 0.0001417793709985415, |
|
"loss": 1.1823, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.42765869744435286, |
|
"grad_norm": 0.34925710089096873, |
|
"learning_rate": 0.0001414522267506484, |
|
"loss": 1.2728, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.42868920032976093, |
|
"grad_norm": 0.25546314251507235, |
|
"learning_rate": 0.0001411245459410322, |
|
"loss": 0.8703, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.429719703215169, |
|
"grad_norm": 0.38324554510188286, |
|
"learning_rate": 0.00014079633281122573, |
|
"loss": 1.2551, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.43075020610057707, |
|
"grad_norm": 0.27810663295608457, |
|
"learning_rate": 0.00014046759160965224, |
|
"loss": 1.0267, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.43178070898598514, |
|
"grad_norm": 0.35335784100847833, |
|
"learning_rate": 0.0001401383265915703, |
|
"loss": 1.1345, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.43281121187139326, |
|
"grad_norm": 0.4118966005270665, |
|
"learning_rate": 0.00013980854201901886, |
|
"loss": 1.1936, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.43384171475680133, |
|
"grad_norm": 0.3062362602687425, |
|
"learning_rate": 0.00013947824216076207, |
|
"loss": 0.9152, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.4348722176422094, |
|
"grad_norm": 0.29620498290186464, |
|
"learning_rate": 0.00013914743129223405, |
|
"loss": 1.2309, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.43590272052761747, |
|
"grad_norm": 0.2710224032495773, |
|
"learning_rate": 0.00013881611369548325, |
|
"loss": 1.0408, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.43693322341302554, |
|
"grad_norm": 0.3056464151796937, |
|
"learning_rate": 0.00013848429365911753, |
|
"loss": 1.1743, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.4379637262984336, |
|
"grad_norm": 0.3843468598801491, |
|
"learning_rate": 0.00013815197547824824, |
|
"loss": 1.2395, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.43899422918384173, |
|
"grad_norm": 0.2798396925326106, |
|
"learning_rate": 0.00013781916345443474, |
|
"loss": 0.8612, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.4400247320692498, |
|
"grad_norm": 0.3279250283208323, |
|
"learning_rate": 0.00013748586189562878, |
|
"loss": 1.2354, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.44105523495465787, |
|
"grad_norm": 0.2735131812328209, |
|
"learning_rate": 0.00013715207511611876, |
|
"loss": 1.0026, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.44208573784006594, |
|
"grad_norm": 0.37219144396512693, |
|
"learning_rate": 0.0001368178074364737, |
|
"loss": 1.0704, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.443116240725474, |
|
"grad_norm": 0.46408445168045037, |
|
"learning_rate": 0.00013648306318348762, |
|
"loss": 1.2511, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.44414674361088213, |
|
"grad_norm": 0.2720164174270696, |
|
"learning_rate": 0.0001361478466901231, |
|
"loss": 0.8989, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.4451772464962902, |
|
"grad_norm": 0.3747167130586965, |
|
"learning_rate": 0.0001358121622954557, |
|
"loss": 1.3666, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.44620774938169827, |
|
"grad_norm": 0.2962954738156199, |
|
"learning_rate": 0.00013547601434461733, |
|
"loss": 0.9376, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.44723825226710634, |
|
"grad_norm": 0.3347575447270046, |
|
"learning_rate": 0.0001351394071887404, |
|
"loss": 1.0754, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.4482687551525144, |
|
"grad_norm": 0.37805918523823334, |
|
"learning_rate": 0.0001348023451849012, |
|
"loss": 1.2106, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.44929925803792253, |
|
"grad_norm": 0.302277885917108, |
|
"learning_rate": 0.00013446483269606362, |
|
"loss": 0.8481, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.4503297609233306, |
|
"grad_norm": 0.35890494559800096, |
|
"learning_rate": 0.00013412687409102277, |
|
"loss": 1.2101, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.45136026380873867, |
|
"grad_norm": 0.27832175193098935, |
|
"learning_rate": 0.00013378847374434814, |
|
"loss": 0.9563, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.45239076669414674, |
|
"grad_norm": 0.35752890078545335, |
|
"learning_rate": 0.0001334496360363274, |
|
"loss": 1.1166, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.4534212695795548, |
|
"grad_norm": 0.3208813709791692, |
|
"learning_rate": 0.00013311036535290925, |
|
"loss": 1.2658, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.4534212695795548, |
|
"eval_loss": 1.0313003063201904, |
|
"eval_runtime": 2462.8678, |
|
"eval_samples_per_second": 3.248, |
|
"eval_steps_per_second": 0.203, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.4544517724649629, |
|
"grad_norm": 0.3164662130947771, |
|
"learning_rate": 0.00013277066608564713, |
|
"loss": 0.9046, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.455482275350371, |
|
"grad_norm": 0.32760397119015733, |
|
"learning_rate": 0.0001324305426316418, |
|
"loss": 1.2596, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.45651277823577907, |
|
"grad_norm": 0.26879676205264913, |
|
"learning_rate": 0.000132089999393485, |
|
"loss": 0.9842, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.45754328112118714, |
|
"grad_norm": 0.35211465947054194, |
|
"learning_rate": 0.000131749040779202, |
|
"loss": 1.1149, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.4585737840065952, |
|
"grad_norm": 0.3640438187361373, |
|
"learning_rate": 0.0001314076712021949, |
|
"loss": 1.2521, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.4596042868920033, |
|
"grad_norm": 0.28859583596302096, |
|
"learning_rate": 0.0001310658950811852, |
|
"loss": 0.8689, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.4606347897774114, |
|
"grad_norm": 0.3245571156879321, |
|
"learning_rate": 0.00013072371684015688, |
|
"loss": 1.2013, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.46166529266281947, |
|
"grad_norm": 0.2630264452723511, |
|
"learning_rate": 0.00013038114090829892, |
|
"loss": 1.0054, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.46269579554822754, |
|
"grad_norm": 0.3629689887711568, |
|
"learning_rate": 0.00013003817171994807, |
|
"loss": 1.1201, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.4637262984336356, |
|
"grad_norm": 0.38051210660623236, |
|
"learning_rate": 0.00012969481371453135, |
|
"loss": 1.2291, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.4647568013190437, |
|
"grad_norm": 0.3038049492726604, |
|
"learning_rate": 0.00012935107133650885, |
|
"loss": 0.8923, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.46578730420445175, |
|
"grad_norm": 0.34319934420858916, |
|
"learning_rate": 0.00012900694903531586, |
|
"loss": 1.2325, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.46681780708985987, |
|
"grad_norm": 0.2729387501421985, |
|
"learning_rate": 0.0001286624512653055, |
|
"loss": 0.9893, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.46784830997526794, |
|
"grad_norm": 0.37018830848703915, |
|
"learning_rate": 0.00012831758248569097, |
|
"loss": 1.1021, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.468878812860676, |
|
"grad_norm": 0.36278206444854155, |
|
"learning_rate": 0.00012797234716048784, |
|
"loss": 1.2005, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.4699093157460841, |
|
"grad_norm": 0.2976065955292209, |
|
"learning_rate": 0.00012762674975845637, |
|
"loss": 0.8405, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.47093981863149215, |
|
"grad_norm": 0.3358390029204746, |
|
"learning_rate": 0.00012728079475304345, |
|
"loss": 1.2722, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.47197032151690027, |
|
"grad_norm": 0.24689134910878455, |
|
"learning_rate": 0.0001269344866223249, |
|
"loss": 0.9856, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.47300082440230834, |
|
"grad_norm": 0.3183219956731036, |
|
"learning_rate": 0.00012658782984894743, |
|
"loss": 1.0672, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.4740313272877164, |
|
"grad_norm": 0.3220468985387059, |
|
"learning_rate": 0.00012624082892007064, |
|
"loss": 1.2301, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.4750618301731245, |
|
"grad_norm": 0.3012629746942751, |
|
"learning_rate": 0.00012589348832730882, |
|
"loss": 0.9345, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.47609233305853255, |
|
"grad_norm": 0.34208223291291867, |
|
"learning_rate": 0.00012554581256667296, |
|
"loss": 1.1783, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.47712283594394067, |
|
"grad_norm": 0.2633458752929658, |
|
"learning_rate": 0.00012519780613851254, |
|
"loss": 1.0127, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.47815333882934874, |
|
"grad_norm": 0.3245639160611015, |
|
"learning_rate": 0.00012484947354745714, |
|
"loss": 1.074, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.4791838417147568, |
|
"grad_norm": 0.4155838481655266, |
|
"learning_rate": 0.0001245008193023583, |
|
"loss": 1.2446, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.4802143446001649, |
|
"grad_norm": 0.29474111212781395, |
|
"learning_rate": 0.00012415184791623101, |
|
"loss": 0.8756, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.48124484748557295, |
|
"grad_norm": 0.38251199632272437, |
|
"learning_rate": 0.00012380256390619548, |
|
"loss": 1.2367, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.482275350370981, |
|
"grad_norm": 0.2876263709986664, |
|
"learning_rate": 0.00012345297179341844, |
|
"loss": 0.942, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.48330585325638914, |
|
"grad_norm": 0.3470650266011227, |
|
"learning_rate": 0.00012310307610305477, |
|
"loss": 1.1312, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.4843363561417972, |
|
"grad_norm": 0.3437397458970076, |
|
"learning_rate": 0.00012275288136418889, |
|
"loss": 1.2013, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.4853668590272053, |
|
"grad_norm": 0.2891400010122533, |
|
"learning_rate": 0.0001224023921097762, |
|
"loss": 0.875, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.48639736191261335, |
|
"grad_norm": 0.3343377419438735, |
|
"learning_rate": 0.0001220516128765842, |
|
"loss": 1.2263, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.4874278647980214, |
|
"grad_norm": 0.2815731947053776, |
|
"learning_rate": 0.00012170054820513401, |
|
"loss": 1.0279, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.48845836768342954, |
|
"grad_norm": 0.34918143666071294, |
|
"learning_rate": 0.00012134920263964147, |
|
"loss": 1.1073, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.4894888705688376, |
|
"grad_norm": 0.3378508040938572, |
|
"learning_rate": 0.00012099758072795832, |
|
"loss": 1.2131, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.4905193734542457, |
|
"grad_norm": 0.3404097695213611, |
|
"learning_rate": 0.00012064568702151335, |
|
"loss": 0.9207, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.49154987633965375, |
|
"grad_norm": 0.31871974527595615, |
|
"learning_rate": 0.0001202935260752535, |
|
"loss": 1.2815, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.4925803792250618, |
|
"grad_norm": 0.27586523947951513, |
|
"learning_rate": 0.00011994110244758496, |
|
"loss": 0.9798, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.4936108821104699, |
|
"grad_norm": 0.29258038219303606, |
|
"learning_rate": 0.00011958842070031395, |
|
"loss": 1.0612, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.494641384995878, |
|
"grad_norm": 0.37118694749476105, |
|
"learning_rate": 0.0001192354853985879, |
|
"loss": 1.1718, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.494641384995878, |
|
"eval_loss": 1.0186035633087158, |
|
"eval_runtime": 2474.4655, |
|
"eval_samples_per_second": 3.233, |
|
"eval_steps_per_second": 0.202, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.4956718878812861, |
|
"grad_norm": 0.3041203248300929, |
|
"learning_rate": 0.00011888230111083627, |
|
"loss": 0.8718, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.49670239076669415, |
|
"grad_norm": 0.34626490332091997, |
|
"learning_rate": 0.00011852887240871145, |
|
"loss": 1.2384, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.4977328936521022, |
|
"grad_norm": 0.2627628128026553, |
|
"learning_rate": 0.00011817520386702947, |
|
"loss": 0.9474, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.4987633965375103, |
|
"grad_norm": 0.3243643599593401, |
|
"learning_rate": 0.00011782130006371092, |
|
"loss": 1.0514, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.4997938994229184, |
|
"grad_norm": 0.38727382866372845, |
|
"learning_rate": 0.00011746716557972167, |
|
"loss": 1.2191, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.5008244023083265, |
|
"grad_norm": 0.29252070214667514, |
|
"learning_rate": 0.00011711280499901347, |
|
"loss": 0.8687, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.5018549051937345, |
|
"grad_norm": 0.32103295732093995, |
|
"learning_rate": 0.00011675822290846474, |
|
"loss": 1.2169, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.5028854080791426, |
|
"grad_norm": 0.2619181848445574, |
|
"learning_rate": 0.00011640342389782114, |
|
"loss": 0.9916, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.5039159109645507, |
|
"grad_norm": 0.36227995308769084, |
|
"learning_rate": 0.00011604841255963616, |
|
"loss": 1.0797, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.5049464138499588, |
|
"grad_norm": 0.4472357927883422, |
|
"learning_rate": 0.00011569319348921168, |
|
"loss": 1.2004, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.5059769167353668, |
|
"grad_norm": 0.2990090832488795, |
|
"learning_rate": 0.00011533777128453844, |
|
"loss": 0.905, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.5070074196207749, |
|
"grad_norm": 0.3421802694540464, |
|
"learning_rate": 0.00011498215054623664, |
|
"loss": 1.2502, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.5080379225061831, |
|
"grad_norm": 0.2619877856697061, |
|
"learning_rate": 0.00011462633587749629, |
|
"loss": 0.9878, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.5090684253915911, |
|
"grad_norm": 0.3573733070627249, |
|
"learning_rate": 0.00011427033188401768, |
|
"loss": 1.0799, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.5100989282769992, |
|
"grad_norm": 0.36110594973295185, |
|
"learning_rate": 0.00011391414317395167, |
|
"loss": 1.1831, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.5111294311624073, |
|
"grad_norm": 0.29726968151818156, |
|
"learning_rate": 0.00011355777435784016, |
|
"loss": 0.9268, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.5121599340478153, |
|
"grad_norm": 0.38992649702696064, |
|
"learning_rate": 0.0001132012300485564, |
|
"loss": 1.2403, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.5131904369332234, |
|
"grad_norm": 0.31217559249274757, |
|
"learning_rate": 0.00011284451486124514, |
|
"loss": 0.9683, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.5142209398186315, |
|
"grad_norm": 0.3309516167120589, |
|
"learning_rate": 0.00011248763341326307, |
|
"loss": 1.018, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.5152514427040396, |
|
"grad_norm": 0.3669152523683029, |
|
"learning_rate": 0.00011213059032411897, |
|
"loss": 1.2018, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.5162819455894476, |
|
"grad_norm": 0.26794057215849676, |
|
"learning_rate": 0.00011177339021541387, |
|
"loss": 0.8112, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.5173124484748557, |
|
"grad_norm": 0.35964442987172796, |
|
"learning_rate": 0.00011141603771078133, |
|
"loss": 1.1878, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.5183429513602638, |
|
"grad_norm": 0.26756565994743753, |
|
"learning_rate": 0.00011105853743582751, |
|
"loss": 1.0244, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.5193734542456719, |
|
"grad_norm": 0.37262291618379195, |
|
"learning_rate": 0.00011070089401807129, |
|
"loss": 1.0956, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.52040395713108, |
|
"grad_norm": 0.3862693430821137, |
|
"learning_rate": 0.0001103431120868845, |
|
"loss": 1.1777, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.5214344600164881, |
|
"grad_norm": 0.34456489334354296, |
|
"learning_rate": 0.00010998519627343182, |
|
"loss": 0.9219, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.5224649629018961, |
|
"grad_norm": 0.3363644750152242, |
|
"learning_rate": 0.00010962715121061095, |
|
"loss": 1.2219, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.5234954657873042, |
|
"grad_norm": 0.2773020803302085, |
|
"learning_rate": 0.00010926898153299259, |
|
"loss": 0.9829, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.5245259686727123, |
|
"grad_norm": 0.3387440320396881, |
|
"learning_rate": 0.00010891069187676051, |
|
"loss": 1.1028, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.5255564715581204, |
|
"grad_norm": 0.4105637833575777, |
|
"learning_rate": 0.00010855228687965138, |
|
"loss": 1.1622, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.5265869744435284, |
|
"grad_norm": 0.3203362105742117, |
|
"learning_rate": 0.000108193771180895, |
|
"loss": 0.8631, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.5276174773289365, |
|
"grad_norm": 0.3176938770218858, |
|
"learning_rate": 0.00010783514942115398, |
|
"loss": 1.1615, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.5286479802143446, |
|
"grad_norm": 0.27188600955614695, |
|
"learning_rate": 0.0001074764262424639, |
|
"loss": 1.0099, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.5296784830997526, |
|
"grad_norm": 0.3225534680387049, |
|
"learning_rate": 0.00010711760628817304, |
|
"loss": 1.1351, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.5307089859851608, |
|
"grad_norm": 0.39558907715108127, |
|
"learning_rate": 0.0001067586942028824, |
|
"loss": 1.1706, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.5317394888705689, |
|
"grad_norm": 0.28753930251222504, |
|
"learning_rate": 0.00010639969463238553, |
|
"loss": 0.8745, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.532769991755977, |
|
"grad_norm": 0.30734397473616565, |
|
"learning_rate": 0.00010604061222360828, |
|
"loss": 1.1448, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.533800494641385, |
|
"grad_norm": 0.26804645458820414, |
|
"learning_rate": 0.00010568145162454896, |
|
"loss": 0.9725, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.5348309975267931, |
|
"grad_norm": 0.3263053082797193, |
|
"learning_rate": 0.00010532221748421787, |
|
"loss": 1.0632, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.5358615004122012, |
|
"grad_norm": 0.3650347794639528, |
|
"learning_rate": 0.00010496291445257725, |
|
"loss": 1.1795, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.5358615004122012, |
|
"eval_loss": 1.0102049112319946, |
|
"eval_runtime": 2464.7805, |
|
"eval_samples_per_second": 3.246, |
|
"eval_steps_per_second": 0.203, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.5368920032976092, |
|
"grad_norm": 0.299206263893403, |
|
"learning_rate": 0.00010460354718048109, |
|
"loss": 0.8798, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.5379225061830173, |
|
"grad_norm": 0.32212757306557593, |
|
"learning_rate": 0.00010424412031961484, |
|
"loss": 1.2768, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.5389530090684254, |
|
"grad_norm": 0.2686223265118172, |
|
"learning_rate": 0.0001038846385224354, |
|
"loss": 0.9964, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.5399835119538334, |
|
"grad_norm": 0.31440179887584824, |
|
"learning_rate": 0.00010352510644211074, |
|
"loss": 1.0418, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.5410140148392415, |
|
"grad_norm": 0.3262137336119927, |
|
"learning_rate": 0.0001031655287324596, |
|
"loss": 1.2262, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.5420445177246497, |
|
"grad_norm": 0.2977115868800609, |
|
"learning_rate": 0.00010280591004789144, |
|
"loss": 0.867, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.5430750206100577, |
|
"grad_norm": 0.3481967708447838, |
|
"learning_rate": 0.00010244625504334609, |
|
"loss": 1.1666, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.5441055234954658, |
|
"grad_norm": 0.27201591179231566, |
|
"learning_rate": 0.00010208656837423351, |
|
"loss": 0.9024, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.5451360263808739, |
|
"grad_norm": 0.3829985152487614, |
|
"learning_rate": 0.00010172685469637351, |
|
"loss": 1.0713, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.546166529266282, |
|
"grad_norm": 0.36090749295144076, |
|
"learning_rate": 0.00010136711866593551, |
|
"loss": 1.1479, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.54719703215169, |
|
"grad_norm": 0.300126161900349, |
|
"learning_rate": 0.00010100736493937828, |
|
"loss": 0.8899, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.5482275350370981, |
|
"grad_norm": 0.33344984792579047, |
|
"learning_rate": 0.00010064759817338965, |
|
"loss": 1.2477, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.5492580379225062, |
|
"grad_norm": 0.2615632901768006, |
|
"learning_rate": 0.00010028782302482617, |
|
"loss": 0.9453, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.5502885408079142, |
|
"grad_norm": 0.36602450939184455, |
|
"learning_rate": 9.992804415065305e-05, |
|
"loss": 1.0671, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.5513190436933223, |
|
"grad_norm": 0.34898414138240497, |
|
"learning_rate": 9.956826620788352e-05, |
|
"loss": 1.1969, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.5523495465787304, |
|
"grad_norm": 0.28926741748856266, |
|
"learning_rate": 9.920849385351889e-05, |
|
"loss": 0.8264, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.5533800494641385, |
|
"grad_norm": 0.3470665476375764, |
|
"learning_rate": 9.884873174448811e-05, |
|
"loss": 1.2393, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.5544105523495466, |
|
"grad_norm": 0.25248382277496384, |
|
"learning_rate": 9.848898453758752e-05, |
|
"loss": 0.9752, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.5554410552349547, |
|
"grad_norm": 0.322941880174542, |
|
"learning_rate": 9.812925688942054e-05, |
|
"loss": 1.0431, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.5564715581203628, |
|
"grad_norm": 0.5004506241518043, |
|
"learning_rate": 9.776955345633739e-05, |
|
"loss": 1.1417, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.5575020610057708, |
|
"grad_norm": 0.2897297596123754, |
|
"learning_rate": 9.740987889437492e-05, |
|
"loss": 0.9032, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.5585325638911789, |
|
"grad_norm": 0.3767677442786407, |
|
"learning_rate": 9.705023785919623e-05, |
|
"loss": 1.1532, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.559563066776587, |
|
"grad_norm": 0.25503990950782773, |
|
"learning_rate": 9.669063500603049e-05, |
|
"loss": 0.9414, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.560593569661995, |
|
"grad_norm": 0.34636076916747177, |
|
"learning_rate": 9.633107498961251e-05, |
|
"loss": 1.0689, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.5616240725474031, |
|
"grad_norm": 0.39789228504761426, |
|
"learning_rate": 9.597156246412277e-05, |
|
"loss": 1.1464, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.5626545754328112, |
|
"grad_norm": 0.289581783982976, |
|
"learning_rate": 9.561210208312694e-05, |
|
"loss": 0.818, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.5636850783182193, |
|
"grad_norm": 0.31157714978145534, |
|
"learning_rate": 9.525269849951578e-05, |
|
"loss": 1.2706, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.5647155812036274, |
|
"grad_norm": 0.28372271464928434, |
|
"learning_rate": 9.489335636544476e-05, |
|
"loss": 0.9805, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.5657460840890355, |
|
"grad_norm": 0.33997031384483695, |
|
"learning_rate": 9.453408033227406e-05, |
|
"loss": 1.0937, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.5667765869744436, |
|
"grad_norm": 0.33417374019033036, |
|
"learning_rate": 9.417487505050816e-05, |
|
"loss": 1.2517, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.5678070898598516, |
|
"grad_norm": 0.3379621672677856, |
|
"learning_rate": 9.38157451697358e-05, |
|
"loss": 0.8737, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.5688375927452597, |
|
"grad_norm": 0.3589988298317977, |
|
"learning_rate": 9.345669533856961e-05, |
|
"loss": 1.1273, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.5698680956306678, |
|
"grad_norm": 0.26916205391576065, |
|
"learning_rate": 9.309773020458616e-05, |
|
"loss": 0.9603, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.5708985985160758, |
|
"grad_norm": 0.3159986677198098, |
|
"learning_rate": 9.273885441426562e-05, |
|
"loss": 1.077, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.5719291014014839, |
|
"grad_norm": 0.3412008285509142, |
|
"learning_rate": 9.238007261293176e-05, |
|
"loss": 1.1973, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.572959604286892, |
|
"grad_norm": 0.28834466612116516, |
|
"learning_rate": 9.202138944469168e-05, |
|
"loss": 0.8306, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.5739901071723, |
|
"grad_norm": 0.38911649440218604, |
|
"learning_rate": 9.16628095523758e-05, |
|
"loss": 1.141, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.5750206100577082, |
|
"grad_norm": 0.29683818561664493, |
|
"learning_rate": 9.130433757747772e-05, |
|
"loss": 0.9729, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.5760511129431163, |
|
"grad_norm": 0.3445235007246779, |
|
"learning_rate": 9.094597816009409e-05, |
|
"loss": 0.9819, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.5770816158285244, |
|
"grad_norm": 0.3661330469833344, |
|
"learning_rate": 9.058773593886469e-05, |
|
"loss": 1.1984, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.5770816158285244, |
|
"eval_loss": 1.0007954835891724, |
|
"eval_runtime": 2470.6884, |
|
"eval_samples_per_second": 3.238, |
|
"eval_steps_per_second": 0.202, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.5781121187139324, |
|
"grad_norm": 0.32153311729783396, |
|
"learning_rate": 9.022961555091226e-05, |
|
"loss": 0.8678, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.5791426215993405, |
|
"grad_norm": 0.36605355908566417, |
|
"learning_rate": 8.987162163178256e-05, |
|
"loss": 1.2198, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.5801731244847486, |
|
"grad_norm": 0.2691696483789003, |
|
"learning_rate": 8.951375881538421e-05, |
|
"loss": 0.9082, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.5812036273701566, |
|
"grad_norm": 0.35640081108055943, |
|
"learning_rate": 8.915603173392895e-05, |
|
"loss": 1.0764, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.5822341302555647, |
|
"grad_norm": 0.33493558837647336, |
|
"learning_rate": 8.87984450178715e-05, |
|
"loss": 1.1807, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.5832646331409728, |
|
"grad_norm": 0.31748865735569, |
|
"learning_rate": 8.84410032958497e-05, |
|
"loss": 0.8788, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.5842951360263808, |
|
"grad_norm": 0.36563160381436116, |
|
"learning_rate": 8.808371119462452e-05, |
|
"loss": 1.1127, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.5853256389117889, |
|
"grad_norm": 0.26343660014410963, |
|
"learning_rate": 8.772657333902027e-05, |
|
"loss": 0.9977, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.5863561417971971, |
|
"grad_norm": 0.3695249008293323, |
|
"learning_rate": 8.736959435186466e-05, |
|
"loss": 1.095, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.5873866446826052, |
|
"grad_norm": 0.35773020515251225, |
|
"learning_rate": 8.701277885392909e-05, |
|
"loss": 1.2322, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.5884171475680132, |
|
"grad_norm": 0.3269028767537368, |
|
"learning_rate": 8.665613146386854e-05, |
|
"loss": 0.8593, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.5894476504534213, |
|
"grad_norm": 0.3544711574787662, |
|
"learning_rate": 8.629965679816217e-05, |
|
"loss": 1.1952, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.5904781533388294, |
|
"grad_norm": 0.29639050674293943, |
|
"learning_rate": 8.594335947105328e-05, |
|
"loss": 0.9516, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.5915086562242374, |
|
"grad_norm": 0.3426897718147058, |
|
"learning_rate": 8.558724409448974e-05, |
|
"loss": 1.0779, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.5925391591096455, |
|
"grad_norm": 0.3740367235927869, |
|
"learning_rate": 8.523131527806423e-05, |
|
"loss": 1.1865, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.5935696619950536, |
|
"grad_norm": 0.31189806294414407, |
|
"learning_rate": 8.48755776289545e-05, |
|
"loss": 0.8847, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.5946001648804616, |
|
"grad_norm": 0.37495206238491713, |
|
"learning_rate": 8.452003575186394e-05, |
|
"loss": 1.1804, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.5956306677658697, |
|
"grad_norm": 0.2797206274129371, |
|
"learning_rate": 8.416469424896167e-05, |
|
"loss": 0.9762, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.5966611706512778, |
|
"grad_norm": 0.33348859645525075, |
|
"learning_rate": 8.380955771982332e-05, |
|
"loss": 1.0883, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.597691673536686, |
|
"grad_norm": 0.44145796098661705, |
|
"learning_rate": 8.345463076137125e-05, |
|
"loss": 1.166, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.598722176422094, |
|
"grad_norm": 0.286114945740939, |
|
"learning_rate": 8.309991796781511e-05, |
|
"loss": 0.8535, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.5997526793075021, |
|
"grad_norm": 0.34343359665934886, |
|
"learning_rate": 8.274542393059235e-05, |
|
"loss": 1.1742, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.6007831821929102, |
|
"grad_norm": 0.2624287823547147, |
|
"learning_rate": 8.239115323830889e-05, |
|
"loss": 0.955, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.6018136850783182, |
|
"grad_norm": 0.35154822360336263, |
|
"learning_rate": 8.203711047667958e-05, |
|
"loss": 1.0426, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.6028441879637263, |
|
"grad_norm": 0.3344934169585091, |
|
"learning_rate": 8.1683300228469e-05, |
|
"loss": 1.1785, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.6038746908491344, |
|
"grad_norm": 0.3209999819274115, |
|
"learning_rate": 8.132972707343192e-05, |
|
"loss": 0.8842, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.6049051937345424, |
|
"grad_norm": 0.39454283654575434, |
|
"learning_rate": 8.097639558825427e-05, |
|
"loss": 1.1693, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.6059356966199505, |
|
"grad_norm": 0.2878286096920221, |
|
"learning_rate": 8.062331034649376e-05, |
|
"loss": 0.8965, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.6069661995053586, |
|
"grad_norm": 0.33688522439048935, |
|
"learning_rate": 8.027047591852069e-05, |
|
"loss": 1.1381, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.6079967023907666, |
|
"grad_norm": 0.38189870355144323, |
|
"learning_rate": 7.991789687145873e-05, |
|
"loss": 1.1257, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.6090272052761748, |
|
"grad_norm": 0.3051536804305413, |
|
"learning_rate": 7.956557776912596e-05, |
|
"loss": 0.9044, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.6100577081615829, |
|
"grad_norm": 0.3451327149435641, |
|
"learning_rate": 7.921352317197574e-05, |
|
"loss": 1.2264, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.611088211046991, |
|
"grad_norm": 0.2827004152527063, |
|
"learning_rate": 7.886173763703757e-05, |
|
"loss": 0.9794, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.612118713932399, |
|
"grad_norm": 0.40169478150821714, |
|
"learning_rate": 7.851022571785819e-05, |
|
"loss": 1.1179, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.6131492168178071, |
|
"grad_norm": 0.4438701028140135, |
|
"learning_rate": 7.815899196444267e-05, |
|
"loss": 1.1662, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.6141797197032152, |
|
"grad_norm": 0.3254217005122457, |
|
"learning_rate": 7.780804092319547e-05, |
|
"loss": 0.8434, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.6152102225886232, |
|
"grad_norm": 0.3284145654312824, |
|
"learning_rate": 7.745737713686152e-05, |
|
"loss": 1.2109, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.6162407254740313, |
|
"grad_norm": 0.28117376745819467, |
|
"learning_rate": 7.710700514446762e-05, |
|
"loss": 0.955, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.6172712283594394, |
|
"grad_norm": 0.35764651325723473, |
|
"learning_rate": 7.675692948126345e-05, |
|
"loss": 1.032, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.6183017312448474, |
|
"grad_norm": 0.4045276967582808, |
|
"learning_rate": 7.640715467866307e-05, |
|
"loss": 1.157, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.6183017312448474, |
|
"eval_loss": 0.9929932951927185, |
|
"eval_runtime": 2471.8115, |
|
"eval_samples_per_second": 3.236, |
|
"eval_steps_per_second": 0.202, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.6193322341302555, |
|
"grad_norm": 0.3148405737434604, |
|
"learning_rate": 7.605768526418605e-05, |
|
"loss": 0.8743, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.6203627370156637, |
|
"grad_norm": 0.35877325985884195, |
|
"learning_rate": 7.57085257613991e-05, |
|
"loss": 1.2573, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.6213932399010718, |
|
"grad_norm": 0.2728312132512685, |
|
"learning_rate": 7.535968068985737e-05, |
|
"loss": 0.9663, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.6224237427864798, |
|
"grad_norm": 0.3236792895371986, |
|
"learning_rate": 7.501115456504595e-05, |
|
"loss": 1.1109, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.6234542456718879, |
|
"grad_norm": 0.40547856146336464, |
|
"learning_rate": 7.466295189832148e-05, |
|
"loss": 1.1882, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.624484748557296, |
|
"grad_norm": 0.26523329910607873, |
|
"learning_rate": 7.431507719685371e-05, |
|
"loss": 0.8277, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.625515251442704, |
|
"grad_norm": 0.3252197516332192, |
|
"learning_rate": 7.396753496356718e-05, |
|
"loss": 1.2303, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.6265457543281121, |
|
"grad_norm": 0.2664136991956755, |
|
"learning_rate": 7.362032969708297e-05, |
|
"loss": 0.9909, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.6275762572135202, |
|
"grad_norm": 0.3438706008672591, |
|
"learning_rate": 7.327346589166035e-05, |
|
"loss": 1.057, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.6286067600989282, |
|
"grad_norm": 0.40393507740091983, |
|
"learning_rate": 7.292694803713871e-05, |
|
"loss": 1.1674, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.6296372629843363, |
|
"grad_norm": 0.2966521374455327, |
|
"learning_rate": 7.258078061887947e-05, |
|
"loss": 0.8634, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.6306677658697445, |
|
"grad_norm": 0.3313252298668368, |
|
"learning_rate": 7.223496811770796e-05, |
|
"loss": 1.1705, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.6316982687551526, |
|
"grad_norm": 0.282517205910409, |
|
"learning_rate": 7.188951500985533e-05, |
|
"loss": 0.9568, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.6327287716405606, |
|
"grad_norm": 0.3423739475910178, |
|
"learning_rate": 7.154442576690083e-05, |
|
"loss": 1.1215, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.6337592745259687, |
|
"grad_norm": 0.3951313592694174, |
|
"learning_rate": 7.119970485571375e-05, |
|
"loss": 1.2196, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.6347897774113768, |
|
"grad_norm": 0.27968067577198935, |
|
"learning_rate": 7.08553567383956e-05, |
|
"loss": 0.8517, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.6358202802967848, |
|
"grad_norm": 0.3967375078625761, |
|
"learning_rate": 7.051138587222255e-05, |
|
"loss": 1.1345, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.6368507831821929, |
|
"grad_norm": 0.275638071848918, |
|
"learning_rate": 7.016779670958746e-05, |
|
"loss": 0.9428, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.637881286067601, |
|
"grad_norm": 0.3876563491000536, |
|
"learning_rate": 6.982459369794247e-05, |
|
"loss": 1.0172, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.638911788953009, |
|
"grad_norm": 0.39932395010881844, |
|
"learning_rate": 6.948178127974127e-05, |
|
"loss": 1.136, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.6399422918384171, |
|
"grad_norm": 0.344145718815036, |
|
"learning_rate": 6.913936389238174e-05, |
|
"loss": 0.9074, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.6409727947238252, |
|
"grad_norm": 0.35702868127871346, |
|
"learning_rate": 6.879734596814839e-05, |
|
"loss": 1.2245, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.6420032976092334, |
|
"grad_norm": 0.278189478468898, |
|
"learning_rate": 6.845573193415508e-05, |
|
"loss": 0.9795, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.6430338004946414, |
|
"grad_norm": 0.3820554155556976, |
|
"learning_rate": 6.811452621228766e-05, |
|
"loss": 1.0815, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.6440643033800495, |
|
"grad_norm": 0.42159702416774886, |
|
"learning_rate": 6.777373321914671e-05, |
|
"loss": 1.2058, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.6450948062654576, |
|
"grad_norm": 0.30363813905867076, |
|
"learning_rate": 6.743335736599045e-05, |
|
"loss": 0.853, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.6461253091508656, |
|
"grad_norm": 0.3626110839853577, |
|
"learning_rate": 6.709340305867762e-05, |
|
"loss": 1.1719, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.6471558120362737, |
|
"grad_norm": 0.2741980722413254, |
|
"learning_rate": 6.675387469761033e-05, |
|
"loss": 0.9857, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.6481863149216818, |
|
"grad_norm": 0.3458024372951965, |
|
"learning_rate": 6.641477667767738e-05, |
|
"loss": 1.0737, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.6492168178070898, |
|
"grad_norm": 0.3659993842021884, |
|
"learning_rate": 6.607611338819697e-05, |
|
"loss": 1.2332, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.6502473206924979, |
|
"grad_norm": 0.3134224056811165, |
|
"learning_rate": 6.573788921286028e-05, |
|
"loss": 0.833, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.651277823577906, |
|
"grad_norm": 0.31557728275482577, |
|
"learning_rate": 6.540010852967447e-05, |
|
"loss": 1.169, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.652308326463314, |
|
"grad_norm": 0.2745096877567603, |
|
"learning_rate": 6.506277571090613e-05, |
|
"loss": 0.9294, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.6533388293487222, |
|
"grad_norm": 0.37277814205936943, |
|
"learning_rate": 6.47258951230246e-05, |
|
"loss": 1.081, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.6543693322341303, |
|
"grad_norm": 0.38045711766200485, |
|
"learning_rate": 6.438947112664555e-05, |
|
"loss": 1.1493, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.6553998351195384, |
|
"grad_norm": 0.2706340588373179, |
|
"learning_rate": 6.405350807647444e-05, |
|
"loss": 0.8498, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.6564303380049464, |
|
"grad_norm": 0.3336018890725225, |
|
"learning_rate": 6.371801032125026e-05, |
|
"loss": 1.1778, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.6574608408903545, |
|
"grad_norm": 0.2913475821459678, |
|
"learning_rate": 6.338298220368912e-05, |
|
"loss": 0.9571, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.6584913437757626, |
|
"grad_norm": 0.36985494968453825, |
|
"learning_rate": 6.304842806042812e-05, |
|
"loss": 1.0778, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.6595218466611706, |
|
"grad_norm": 0.40407830600531813, |
|
"learning_rate": 6.271435222196916e-05, |
|
"loss": 1.1542, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.6595218466611706, |
|
"eval_loss": 0.9862346649169922, |
|
"eval_runtime": 2465.9653, |
|
"eval_samples_per_second": 3.244, |
|
"eval_steps_per_second": 0.203, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.6605523495465787, |
|
"grad_norm": 0.2524320710636017, |
|
"learning_rate": 6.238075901262293e-05, |
|
"loss": 0.8157, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.6615828524319868, |
|
"grad_norm": 0.3278601577235125, |
|
"learning_rate": 6.204765275045298e-05, |
|
"loss": 1.1461, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.6626133553173948, |
|
"grad_norm": 0.28938045624305053, |
|
"learning_rate": 6.171503774721966e-05, |
|
"loss": 0.9831, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.6636438582028029, |
|
"grad_norm": 0.3531127055799286, |
|
"learning_rate": 6.13829183083245e-05, |
|
"loss": 1.0438, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.6646743610882111, |
|
"grad_norm": 0.3337288007445976, |
|
"learning_rate": 6.105129873275435e-05, |
|
"loss": 1.2684, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.6657048639736192, |
|
"grad_norm": 0.30432663000679183, |
|
"learning_rate": 6.072018331302577e-05, |
|
"loss": 0.8405, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.6667353668590272, |
|
"grad_norm": 0.3481398036760903, |
|
"learning_rate": 6.038957633512957e-05, |
|
"loss": 1.1838, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.6677658697444353, |
|
"grad_norm": 0.27336340413608334, |
|
"learning_rate": 6.005948207847516e-05, |
|
"loss": 0.9207, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.6687963726298434, |
|
"grad_norm": 0.3440287779317674, |
|
"learning_rate": 5.97299048158352e-05, |
|
"loss": 1.0703, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.6698268755152514, |
|
"grad_norm": 0.39581616124415014, |
|
"learning_rate": 5.940084881329042e-05, |
|
"loss": 1.2125, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.6708573784006595, |
|
"grad_norm": 0.2935018842179493, |
|
"learning_rate": 5.907231833017424e-05, |
|
"loss": 0.8331, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.6718878812860676, |
|
"grad_norm": 0.3836289298282413, |
|
"learning_rate": 5.8744317619017755e-05, |
|
"loss": 1.2524, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.6729183841714756, |
|
"grad_norm": 0.2710041280727128, |
|
"learning_rate": 5.841685092549456e-05, |
|
"loss": 0.9405, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.6739488870568837, |
|
"grad_norm": 0.32969149065243836, |
|
"learning_rate": 5.8089922488365975e-05, |
|
"loss": 1.0393, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.6749793899422918, |
|
"grad_norm": 0.336443888291771, |
|
"learning_rate": 5.776353653942602e-05, |
|
"loss": 1.2373, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.6760098928277, |
|
"grad_norm": 0.30665104617457667, |
|
"learning_rate": 5.743769730344666e-05, |
|
"loss": 0.8681, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.677040395713108, |
|
"grad_norm": 0.3625802330226177, |
|
"learning_rate": 5.7112408998123256e-05, |
|
"loss": 1.2316, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.6780708985985161, |
|
"grad_norm": 0.2953674696158516, |
|
"learning_rate": 5.678767583401974e-05, |
|
"loss": 0.9204, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.6791014014839242, |
|
"grad_norm": 0.38225007623929924, |
|
"learning_rate": 5.646350201451438e-05, |
|
"loss": 1.066, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.6801319043693322, |
|
"grad_norm": 0.4406303775855105, |
|
"learning_rate": 5.613989173574512e-05, |
|
"loss": 1.1569, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.6811624072547403, |
|
"grad_norm": 0.2882802372851351, |
|
"learning_rate": 5.5816849186555386e-05, |
|
"loss": 0.86, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.6821929101401484, |
|
"grad_norm": 0.3780252315323207, |
|
"learning_rate": 5.549437854843995e-05, |
|
"loss": 1.1646, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.6832234130255564, |
|
"grad_norm": 0.2834828395630223, |
|
"learning_rate": 5.517248399549063e-05, |
|
"loss": 0.973, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.6842539159109645, |
|
"grad_norm": 0.3721150712296605, |
|
"learning_rate": 5.48511696943423e-05, |
|
"loss": 1.0507, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.6852844187963726, |
|
"grad_norm": 0.3748866522580312, |
|
"learning_rate": 5.4530439804119096e-05, |
|
"loss": 1.1834, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.6863149216817807, |
|
"grad_norm": 0.2887534766618149, |
|
"learning_rate": 5.4210298476380484e-05, |
|
"loss": 0.888, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.6873454245671888, |
|
"grad_norm": 0.3409152912595764, |
|
"learning_rate": 5.38907498550674e-05, |
|
"loss": 1.1606, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.6883759274525969, |
|
"grad_norm": 0.29326388422606586, |
|
"learning_rate": 5.357179807644887e-05, |
|
"loss": 0.8845, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.689406430338005, |
|
"grad_norm": 0.3589561770883435, |
|
"learning_rate": 5.3253447269068245e-05, |
|
"loss": 1.0743, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.690436933223413, |
|
"grad_norm": 0.38383916804262924, |
|
"learning_rate": 5.293570155368981e-05, |
|
"loss": 1.183, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.6914674361088211, |
|
"grad_norm": 0.3068092183719897, |
|
"learning_rate": 5.261856504324563e-05, |
|
"loss": 0.881, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.6924979389942292, |
|
"grad_norm": 0.36385771675879847, |
|
"learning_rate": 5.230204184278195e-05, |
|
"loss": 1.2467, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.6935284418796372, |
|
"grad_norm": 0.2907466758494882, |
|
"learning_rate": 5.198613604940649e-05, |
|
"loss": 0.9442, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.6945589447650453, |
|
"grad_norm": 0.3566322978977828, |
|
"learning_rate": 5.1670851752235025e-05, |
|
"loss": 1.0632, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.6955894476504534, |
|
"grad_norm": 0.36835384306712526, |
|
"learning_rate": 5.135619303233867e-05, |
|
"loss": 1.1819, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.6966199505358615, |
|
"grad_norm": 0.30530163857595344, |
|
"learning_rate": 5.104216396269109e-05, |
|
"loss": 0.8697, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.6976504534212696, |
|
"grad_norm": 0.3728398219157709, |
|
"learning_rate": 5.072876860811553e-05, |
|
"loss": 1.2065, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.6986809563066777, |
|
"grad_norm": 0.2719186126381508, |
|
"learning_rate": 5.0416011025232546e-05, |
|
"loss": 0.9332, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.6997114591920858, |
|
"grad_norm": 0.39560521280304406, |
|
"learning_rate": 5.010389526240719e-05, |
|
"loss": 1.0035, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.7007419620774938, |
|
"grad_norm": 0.45096201363438326, |
|
"learning_rate": 4.979242535969672e-05, |
|
"loss": 1.1648, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.7007419620774938, |
|
"eval_loss": 0.9801868796348572, |
|
"eval_runtime": 2471.0518, |
|
"eval_samples_per_second": 3.237, |
|
"eval_steps_per_second": 0.202, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.7017724649629019, |
|
"grad_norm": 0.3253388221391844, |
|
"learning_rate": 4.9481605348798435e-05, |
|
"loss": 0.8283, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.70280296784831, |
|
"grad_norm": 0.3382283949543721, |
|
"learning_rate": 4.917143925299728e-05, |
|
"loss": 1.1642, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.703833470733718, |
|
"grad_norm": 0.2650304501518648, |
|
"learning_rate": 4.886193108711378e-05, |
|
"loss": 0.9334, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.7048639736191261, |
|
"grad_norm": 0.3163351222719128, |
|
"learning_rate": 4.8553084857452426e-05, |
|
"loss": 1.0316, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.7058944765045342, |
|
"grad_norm": 0.4184991045727221, |
|
"learning_rate": 4.824490456174926e-05, |
|
"loss": 1.1313, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.7069249793899423, |
|
"grad_norm": 0.31621686065106913, |
|
"learning_rate": 4.7937394189120485e-05, |
|
"loss": 0.8658, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.7079554822753503, |
|
"grad_norm": 0.3252525212780396, |
|
"learning_rate": 4.763055772001086e-05, |
|
"loss": 1.1749, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.7089859851607585, |
|
"grad_norm": 0.28077212260765644, |
|
"learning_rate": 4.732439912614195e-05, |
|
"loss": 0.9195, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.7100164880461666, |
|
"grad_norm": 0.3404715984560569, |
|
"learning_rate": 4.7018922370460835e-05, |
|
"loss": 1.1004, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.7110469909315746, |
|
"grad_norm": 0.3572114714960076, |
|
"learning_rate": 4.671413140708893e-05, |
|
"loss": 1.1555, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.7120774938169827, |
|
"grad_norm": 0.31012926926152484, |
|
"learning_rate": 4.6410030181270546e-05, |
|
"loss": 0.8328, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.7131079967023908, |
|
"grad_norm": 0.3814244438557788, |
|
"learning_rate": 4.610662262932209e-05, |
|
"loss": 1.2221, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.7141384995877988, |
|
"grad_norm": 0.2587030449835817, |
|
"learning_rate": 4.5803912678580906e-05, |
|
"loss": 0.923, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.7151690024732069, |
|
"grad_norm": 0.36964920266690043, |
|
"learning_rate": 4.5501904247354474e-05, |
|
"loss": 1.0765, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.716199505358615, |
|
"grad_norm": 0.4007947343253882, |
|
"learning_rate": 4.520060124486989e-05, |
|
"loss": 1.0613, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.717230008244023, |
|
"grad_norm": 0.31355619965035286, |
|
"learning_rate": 4.4900007571222946e-05, |
|
"loss": 0.8406, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.7182605111294311, |
|
"grad_norm": 0.3709230553408382, |
|
"learning_rate": 4.460012711732795e-05, |
|
"loss": 1.202, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.7192910140148392, |
|
"grad_norm": 0.2663391335521852, |
|
"learning_rate": 4.430096376486713e-05, |
|
"loss": 0.957, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.7203215169002474, |
|
"grad_norm": 0.36149156644544367, |
|
"learning_rate": 4.4002521386240466e-05, |
|
"loss": 1.0885, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.7213520197856554, |
|
"grad_norm": 0.4116390911425264, |
|
"learning_rate": 4.3704803844515705e-05, |
|
"loss": 1.1649, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.7223825226710635, |
|
"grad_norm": 0.3337684750408035, |
|
"learning_rate": 4.3407814993378095e-05, |
|
"loss": 0.8993, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.7234130255564716, |
|
"grad_norm": 0.3496736243866778, |
|
"learning_rate": 4.311155867708071e-05, |
|
"loss": 1.1648, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.7244435284418796, |
|
"grad_norm": 0.28479774144881315, |
|
"learning_rate": 4.2816038730394656e-05, |
|
"loss": 0.9484, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.7254740313272877, |
|
"grad_norm": 0.36918418554896276, |
|
"learning_rate": 4.252125897855932e-05, |
|
"loss": 1.0583, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.7265045342126958, |
|
"grad_norm": 0.33704337919665334, |
|
"learning_rate": 4.222722323723294e-05, |
|
"loss": 1.1907, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.7275350370981039, |
|
"grad_norm": 0.3148307470632366, |
|
"learning_rate": 4.1933935312443286e-05, |
|
"loss": 0.8783, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.7285655399835119, |
|
"grad_norm": 0.3517810748272734, |
|
"learning_rate": 4.164139900053824e-05, |
|
"loss": 1.1717, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.72959604286892, |
|
"grad_norm": 0.2855842847206538, |
|
"learning_rate": 4.134961808813672e-05, |
|
"loss": 0.9112, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.7306265457543281, |
|
"grad_norm": 0.34108259740221386, |
|
"learning_rate": 4.1058596352079805e-05, |
|
"loss": 1.0142, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.7316570486397362, |
|
"grad_norm": 0.45538302782398454, |
|
"learning_rate": 4.076833755938153e-05, |
|
"loss": 1.0974, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.7326875515251443, |
|
"grad_norm": 0.31487990147891376, |
|
"learning_rate": 4.0478845467180506e-05, |
|
"loss": 0.8298, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.7337180544105524, |
|
"grad_norm": 0.36935273464286145, |
|
"learning_rate": 4.0190123822690965e-05, |
|
"loss": 1.1436, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.7347485572959604, |
|
"grad_norm": 0.27930316978222564, |
|
"learning_rate": 3.990217636315441e-05, |
|
"loss": 0.9989, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.7357790601813685, |
|
"grad_norm": 0.3714521960753657, |
|
"learning_rate": 3.961500681579129e-05, |
|
"loss": 1.1123, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.7368095630667766, |
|
"grad_norm": 0.43314908676280006, |
|
"learning_rate": 3.9328618897752566e-05, |
|
"loss": 1.1162, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.7378400659521847, |
|
"grad_norm": 0.29947090161111584, |
|
"learning_rate": 3.904301631607186e-05, |
|
"loss": 0.8351, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.7388705688375927, |
|
"grad_norm": 0.4143197770113969, |
|
"learning_rate": 3.875820276761717e-05, |
|
"loss": 1.2288, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.7399010717230008, |
|
"grad_norm": 0.2821180575460458, |
|
"learning_rate": 3.847418193904324e-05, |
|
"loss": 0.9842, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.7409315746084089, |
|
"grad_norm": 0.35222923714451126, |
|
"learning_rate": 3.8190957506743806e-05, |
|
"loss": 1.0377, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.7419620774938169, |
|
"grad_norm": 0.36695400370029363, |
|
"learning_rate": 3.790853313680389e-05, |
|
"loss": 1.1403, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.7419620774938169, |
|
"eval_loss": 0.9750496745109558, |
|
"eval_runtime": 2464.5637, |
|
"eval_samples_per_second": 3.246, |
|
"eval_steps_per_second": 0.203, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.7429925803792251, |
|
"grad_norm": 0.33168677598566876, |
|
"learning_rate": 3.7626912484952495e-05, |
|
"loss": 0.9223, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.7440230832646332, |
|
"grad_norm": 0.3685099675858659, |
|
"learning_rate": 3.734609919651523e-05, |
|
"loss": 1.22, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.7450535861500412, |
|
"grad_norm": 0.30519501707613905, |
|
"learning_rate": 3.706609690636703e-05, |
|
"loss": 0.9541, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.7460840890354493, |
|
"grad_norm": 0.3227606827521623, |
|
"learning_rate": 3.6786909238885215e-05, |
|
"loss": 1.002, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.7471145919208574, |
|
"grad_norm": 0.3907472086101187, |
|
"learning_rate": 3.650853980790262e-05, |
|
"loss": 1.1461, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.7481450948062655, |
|
"grad_norm": 0.2919486775395443, |
|
"learning_rate": 3.6230992216660664e-05, |
|
"loss": 0.8063, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.7491755976916735, |
|
"grad_norm": 0.32875886981694724, |
|
"learning_rate": 3.595427005776281e-05, |
|
"loss": 1.1514, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.7502061005770816, |
|
"grad_norm": 0.29158449116331836, |
|
"learning_rate": 3.5678376913128075e-05, |
|
"loss": 0.9759, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.7512366034624897, |
|
"grad_norm": 0.37631936130463955, |
|
"learning_rate": 3.540331635394458e-05, |
|
"loss": 0.9905, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.7522671063478977, |
|
"grad_norm": 0.3439508621054155, |
|
"learning_rate": 3.512909194062347e-05, |
|
"loss": 1.1949, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.7532976092333058, |
|
"grad_norm": 0.3146397920651041, |
|
"learning_rate": 3.485570722275264e-05, |
|
"loss": 0.8706, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.754328112118714, |
|
"grad_norm": 0.38258052872478915, |
|
"learning_rate": 3.458316573905087e-05, |
|
"loss": 1.2172, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.755358615004122, |
|
"grad_norm": 0.2793437890823054, |
|
"learning_rate": 3.4311471017322175e-05, |
|
"loss": 0.9391, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.7563891178895301, |
|
"grad_norm": 0.3643128847802364, |
|
"learning_rate": 3.4040626574409815e-05, |
|
"loss": 0.9938, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.7574196207749382, |
|
"grad_norm": 0.3709320339736749, |
|
"learning_rate": 3.377063591615113e-05, |
|
"loss": 1.1904, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.7584501236603463, |
|
"grad_norm": 0.33437847713940755, |
|
"learning_rate": 3.350150253733186e-05, |
|
"loss": 0.9233, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.7594806265457543, |
|
"grad_norm": 0.3607224445907033, |
|
"learning_rate": 3.3233229921641064e-05, |
|
"loss": 1.2125, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.7605111294311624, |
|
"grad_norm": 0.27198443746283474, |
|
"learning_rate": 3.296582154162604e-05, |
|
"loss": 0.9196, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.7615416323165705, |
|
"grad_norm": 0.3668276085455235, |
|
"learning_rate": 3.2699280858647327e-05, |
|
"loss": 1.0335, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.7625721352019785, |
|
"grad_norm": 0.431636600953154, |
|
"learning_rate": 3.2433611322833845e-05, |
|
"loss": 1.1378, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.7636026380873866, |
|
"grad_norm": 0.3284008231342369, |
|
"learning_rate": 3.216881637303839e-05, |
|
"loss": 0.8308, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.7646331409727948, |
|
"grad_norm": 0.3443882838132438, |
|
"learning_rate": 3.190489943679297e-05, |
|
"loss": 1.1797, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.7656636438582028, |
|
"grad_norm": 0.30472428853778355, |
|
"learning_rate": 3.164186393026445e-05, |
|
"loss": 0.8953, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.7666941467436109, |
|
"grad_norm": 0.346855761423586, |
|
"learning_rate": 3.137971325821054e-05, |
|
"loss": 1.1678, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.767724649629019, |
|
"grad_norm": 0.3586318210457773, |
|
"learning_rate": 3.111845081393542e-05, |
|
"loss": 1.1801, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.768755152514427, |
|
"grad_norm": 0.3012111522622655, |
|
"learning_rate": 3.0858079979245965e-05, |
|
"loss": 0.8259, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.7697856553998351, |
|
"grad_norm": 0.3675771233203778, |
|
"learning_rate": 3.059860412440811e-05, |
|
"loss": 1.1379, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.7708161582852432, |
|
"grad_norm": 0.2997972891151249, |
|
"learning_rate": 3.0340026608102902e-05, |
|
"loss": 0.9012, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.7718466611706513, |
|
"grad_norm": 0.3350924890911528, |
|
"learning_rate": 3.008235077738334e-05, |
|
"loss": 1.0476, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.7728771640560593, |
|
"grad_norm": 0.42578144522976874, |
|
"learning_rate": 2.9825579967630846e-05, |
|
"loss": 1.2014, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.7739076669414674, |
|
"grad_norm": 0.31905668249953223, |
|
"learning_rate": 2.956971750251215e-05, |
|
"loss": 0.9089, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.7749381698268755, |
|
"grad_norm": 0.3320270260748608, |
|
"learning_rate": 2.9314766693936356e-05, |
|
"loss": 1.1712, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.7759686727122836, |
|
"grad_norm": 0.26880506131338233, |
|
"learning_rate": 2.906073084201191e-05, |
|
"loss": 0.9532, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.7769991755976917, |
|
"grad_norm": 0.360515318139803, |
|
"learning_rate": 2.8807613235004037e-05, |
|
"loss": 1.0235, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.7780296784830998, |
|
"grad_norm": 0.40012910311865413, |
|
"learning_rate": 2.855541714929206e-05, |
|
"loss": 1.1292, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.7790601813685079, |
|
"grad_norm": 0.3056042121876613, |
|
"learning_rate": 2.8304145849327036e-05, |
|
"loss": 0.7814, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.7800906842539159, |
|
"grad_norm": 0.3475474680120366, |
|
"learning_rate": 2.8053802587589538e-05, |
|
"loss": 1.1668, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.781121187139324, |
|
"grad_norm": 0.2843659913958508, |
|
"learning_rate": 2.7804390604547557e-05, |
|
"loss": 0.9237, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.7821516900247321, |
|
"grad_norm": 0.36497922605390365, |
|
"learning_rate": 2.7555913128614398e-05, |
|
"loss": 1.0276, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.7831821929101401, |
|
"grad_norm": 0.39023528771913, |
|
"learning_rate": 2.7308373376107142e-05, |
|
"loss": 1.1268, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.7831821929101401, |
|
"eval_loss": 0.9705116748809814, |
|
"eval_runtime": 2662.1546, |
|
"eval_samples_per_second": 3.005, |
|
"eval_steps_per_second": 0.188, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.7842126957955482, |
|
"grad_norm": 0.3292637316481058, |
|
"learning_rate": 2.706177455120482e-05, |
|
"loss": 0.8586, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.7852431986809563, |
|
"grad_norm": 0.3463786306953725, |
|
"learning_rate": 2.681611984590696e-05, |
|
"loss": 1.1979, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.7862737015663643, |
|
"grad_norm": 0.3033390300653325, |
|
"learning_rate": 2.6571412439992437e-05, |
|
"loss": 0.9448, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.7873042044517725, |
|
"grad_norm": 0.3744585100233523, |
|
"learning_rate": 2.6327655500978076e-05, |
|
"loss": 1.0825, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.7883347073371806, |
|
"grad_norm": 0.3888535219196645, |
|
"learning_rate": 2.608485218407779e-05, |
|
"loss": 1.1602, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.7893652102225887, |
|
"grad_norm": 0.3281836015354183, |
|
"learning_rate": 2.5843005632161787e-05, |
|
"loss": 0.8071, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.7903957131079967, |
|
"grad_norm": 0.44764397719301374, |
|
"learning_rate": 2.5602118975715683e-05, |
|
"loss": 1.223, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.7914262159934048, |
|
"grad_norm": 0.2605243101554053, |
|
"learning_rate": 2.5362195332800253e-05, |
|
"loss": 0.9269, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.7924567188788129, |
|
"grad_norm": 0.34490759623743283, |
|
"learning_rate": 2.5123237809010836e-05, |
|
"loss": 0.9951, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.7934872217642209, |
|
"grad_norm": 0.45219071339330563, |
|
"learning_rate": 2.4885249497437223e-05, |
|
"loss": 1.1591, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.794517724649629, |
|
"grad_norm": 0.3701824230659728, |
|
"learning_rate": 2.4648233478623705e-05, |
|
"loss": 0.9237, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.7955482275350371, |
|
"grad_norm": 0.3540574000256143, |
|
"learning_rate": 2.4412192820529034e-05, |
|
"loss": 1.1936, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.7965787304204451, |
|
"grad_norm": 0.2979316113245556, |
|
"learning_rate": 2.4177130578486885e-05, |
|
"loss": 0.8818, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.7976092333058532, |
|
"grad_norm": 0.3972288442858004, |
|
"learning_rate": 2.3943049795166126e-05, |
|
"loss": 1.0352, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.7986397361912614, |
|
"grad_norm": 0.43218350490127366, |
|
"learning_rate": 2.370995350053157e-05, |
|
"loss": 1.141, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.7996702390766695, |
|
"grad_norm": 0.33536259992407524, |
|
"learning_rate": 2.3477844711804708e-05, |
|
"loss": 0.8845, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.8007007419620775, |
|
"grad_norm": 0.3715539599255406, |
|
"learning_rate": 2.3246726433424716e-05, |
|
"loss": 1.1807, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.8017312448474856, |
|
"grad_norm": 0.27142620043338195, |
|
"learning_rate": 2.301660165700936e-05, |
|
"loss": 0.9382, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.8027617477328937, |
|
"grad_norm": 0.3200117930180246, |
|
"learning_rate": 2.2787473361316592e-05, |
|
"loss": 1.1648, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.8037922506183017, |
|
"grad_norm": 0.3832079088616207, |
|
"learning_rate": 2.2559344512205705e-05, |
|
"loss": 1.1815, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.8048227535037098, |
|
"grad_norm": 0.28512724726672417, |
|
"learning_rate": 2.233221806259903e-05, |
|
"loss": 0.7237, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.8058532563891179, |
|
"grad_norm": 0.347779304749182, |
|
"learning_rate": 2.2106096952443888e-05, |
|
"loss": 1.1468, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.8068837592745259, |
|
"grad_norm": 0.27344000922062256, |
|
"learning_rate": 2.188098410867424e-05, |
|
"loss": 0.967, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.807914262159934, |
|
"grad_norm": 0.35776849097052316, |
|
"learning_rate": 2.165688244517299e-05, |
|
"loss": 1.0351, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.8089447650453421, |
|
"grad_norm": 0.6146898482449163, |
|
"learning_rate": 2.143379486273428e-05, |
|
"loss": 1.1856, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.8099752679307503, |
|
"grad_norm": 0.37385831712675915, |
|
"learning_rate": 2.1211724249025787e-05, |
|
"loss": 0.9441, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.8110057708161583, |
|
"grad_norm": 0.33568983400788555, |
|
"learning_rate": 2.099067347855157e-05, |
|
"loss": 1.2175, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.8120362737015664, |
|
"grad_norm": 0.2905285106790405, |
|
"learning_rate": 2.077064541261462e-05, |
|
"loss": 0.9725, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.8130667765869745, |
|
"grad_norm": 0.3292103830025177, |
|
"learning_rate": 2.0551642899279975e-05, |
|
"loss": 1.0363, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.8140972794723825, |
|
"grad_norm": 0.4380964265869986, |
|
"learning_rate": 2.0333668773337866e-05, |
|
"loss": 1.1014, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.8151277823577906, |
|
"grad_norm": 0.3297438808755647, |
|
"learning_rate": 2.0116725856266926e-05, |
|
"loss": 0.8749, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.8161582852431987, |
|
"grad_norm": 0.3404142147430593, |
|
"learning_rate": 1.9900816956197698e-05, |
|
"loss": 1.2154, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.8171887881286067, |
|
"grad_norm": 0.28990266176476354, |
|
"learning_rate": 1.9685944867876373e-05, |
|
"loss": 0.9402, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.8182192910140148, |
|
"grad_norm": 0.3750595800822031, |
|
"learning_rate": 1.9472112372628536e-05, |
|
"loss": 1.0682, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.8192497938994229, |
|
"grad_norm": 0.4415444668426194, |
|
"learning_rate": 1.9259322238323095e-05, |
|
"loss": 1.1201, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.8202802967848309, |
|
"grad_norm": 0.31061384334227865, |
|
"learning_rate": 1.9047577219336665e-05, |
|
"loss": 0.8734, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.8213107996702391, |
|
"grad_norm": 0.3577711991948567, |
|
"learning_rate": 1.8836880056517658e-05, |
|
"loss": 1.1896, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.8223413025556472, |
|
"grad_norm": 0.2862925377080917, |
|
"learning_rate": 1.862723347715103e-05, |
|
"loss": 0.9206, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.8233718054410553, |
|
"grad_norm": 0.36692123404130295, |
|
"learning_rate": 1.841864019492282e-05, |
|
"loss": 1.0504, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.8244023083264633, |
|
"grad_norm": 0.3600663907207253, |
|
"learning_rate": 1.821110290988509e-05, |
|
"loss": 1.2122, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.8244023083264633, |
|
"eval_loss": 0.9671783447265625, |
|
"eval_runtime": 2672.6929, |
|
"eval_samples_per_second": 2.993, |
|
"eval_steps_per_second": 0.187, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.8254328112118714, |
|
"grad_norm": 0.3081888529721894, |
|
"learning_rate": 1.8004624308421026e-05, |
|
"loss": 0.7821, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 0.8264633140972795, |
|
"grad_norm": 0.36648973755342457, |
|
"learning_rate": 1.7799207063210044e-05, |
|
"loss": 1.2144, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.8274938169826875, |
|
"grad_norm": 0.2898792198270504, |
|
"learning_rate": 1.759485383319326e-05, |
|
"loss": 0.9427, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 0.8285243198680956, |
|
"grad_norm": 0.38174814126983564, |
|
"learning_rate": 1.7391567263539144e-05, |
|
"loss": 1.0154, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.8295548227535037, |
|
"grad_norm": 0.4742619716693875, |
|
"learning_rate": 1.7189349985609115e-05, |
|
"loss": 1.0813, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 0.8305853256389117, |
|
"grad_norm": 0.3664614977301218, |
|
"learning_rate": 1.6988204616923666e-05, |
|
"loss": 0.8368, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.8316158285243199, |
|
"grad_norm": 0.43325455353643755, |
|
"learning_rate": 1.6788133761128312e-05, |
|
"loss": 1.1778, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 0.832646331409728, |
|
"grad_norm": 0.29420614649546306, |
|
"learning_rate": 1.658914000795999e-05, |
|
"loss": 0.922, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.8336768342951361, |
|
"grad_norm": 0.3437287490008131, |
|
"learning_rate": 1.639122593321357e-05, |
|
"loss": 1.0901, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 0.8347073371805441, |
|
"grad_norm": 0.41394098029113957, |
|
"learning_rate": 1.6194394098708377e-05, |
|
"loss": 1.1766, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.8357378400659522, |
|
"grad_norm": 0.3399407346712763, |
|
"learning_rate": 1.59986470522551e-05, |
|
"loss": 0.8491, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 0.8367683429513603, |
|
"grad_norm": 0.3655997093950039, |
|
"learning_rate": 1.580398732762297e-05, |
|
"loss": 1.1981, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.8377988458367683, |
|
"grad_norm": 0.30216864477326527, |
|
"learning_rate": 1.5610417444506664e-05, |
|
"loss": 0.9703, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 0.8388293487221764, |
|
"grad_norm": 0.3993955860213197, |
|
"learning_rate": 1.541793990849387e-05, |
|
"loss": 1.0866, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.8398598516075845, |
|
"grad_norm": 0.40765142147078065, |
|
"learning_rate": 1.522655721103291e-05, |
|
"loss": 1.1578, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 0.8408903544929925, |
|
"grad_norm": 0.3132479277395683, |
|
"learning_rate": 1.5036271829400294e-05, |
|
"loss": 0.8317, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.8419208573784006, |
|
"grad_norm": 0.353057034757265, |
|
"learning_rate": 1.4847086226668872e-05, |
|
"loss": 1.1387, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 0.8429513602638088, |
|
"grad_norm": 0.28714788090256477, |
|
"learning_rate": 1.4659002851675774e-05, |
|
"loss": 0.9363, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.8439818631492169, |
|
"grad_norm": 0.3558290938292747, |
|
"learning_rate": 1.447202413899078e-05, |
|
"loss": 0.9974, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 0.8450123660346249, |
|
"grad_norm": 0.36577045780008416, |
|
"learning_rate": 1.4286152508884898e-05, |
|
"loss": 1.1466, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.846042868920033, |
|
"grad_norm": 0.3010779731099561, |
|
"learning_rate": 1.4101390367298861e-05, |
|
"loss": 0.7895, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 0.8470733718054411, |
|
"grad_norm": 0.3541525630578856, |
|
"learning_rate": 1.3917740105812094e-05, |
|
"loss": 1.1429, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.8481038746908491, |
|
"grad_norm": 0.28819494987192507, |
|
"learning_rate": 1.3735204101611776e-05, |
|
"loss": 0.9335, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 0.8491343775762572, |
|
"grad_norm": 0.34823589325876386, |
|
"learning_rate": 1.355378471746196e-05, |
|
"loss": 1.0577, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.8501648804616653, |
|
"grad_norm": 0.34950447449734157, |
|
"learning_rate": 1.3373484301673145e-05, |
|
"loss": 1.1231, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 0.8511953833470733, |
|
"grad_norm": 0.29975822569477556, |
|
"learning_rate": 1.3194305188071732e-05, |
|
"loss": 0.8424, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.8522258862324814, |
|
"grad_norm": 0.390420435835868, |
|
"learning_rate": 1.301624969596985e-05, |
|
"loss": 1.1329, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 0.8532563891178895, |
|
"grad_norm": 0.2911183426984063, |
|
"learning_rate": 1.2839320130135468e-05, |
|
"loss": 0.9841, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.8542868920032977, |
|
"grad_norm": 0.3500369252129129, |
|
"learning_rate": 1.266351878076234e-05, |
|
"loss": 1.0513, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 0.8553173948887057, |
|
"grad_norm": 0.4017098058926308, |
|
"learning_rate": 1.2488847923440483e-05, |
|
"loss": 1.1624, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.8563478977741138, |
|
"grad_norm": 0.3441542578051488, |
|
"learning_rate": 1.2315309819126852e-05, |
|
"loss": 0.8611, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 0.8573784006595219, |
|
"grad_norm": 0.36811687971995544, |
|
"learning_rate": 1.2142906714115787e-05, |
|
"loss": 1.2298, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.8584089035449299, |
|
"grad_norm": 0.2934040538277352, |
|
"learning_rate": 1.197164084001009e-05, |
|
"loss": 0.9825, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 0.859439406430338, |
|
"grad_norm": 0.3571442419781514, |
|
"learning_rate": 1.1801514413692239e-05, |
|
"loss": 1.0296, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.8604699093157461, |
|
"grad_norm": 0.4388041060818487, |
|
"learning_rate": 1.1632529637295475e-05, |
|
"loss": 1.1665, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 0.8615004122011541, |
|
"grad_norm": 0.3211936804672997, |
|
"learning_rate": 1.1464688698175497e-05, |
|
"loss": 0.8568, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.8625309150865622, |
|
"grad_norm": 0.38975409701266794, |
|
"learning_rate": 1.1297993768881998e-05, |
|
"loss": 1.1854, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.8635614179719703, |
|
"grad_norm": 0.27934297465783847, |
|
"learning_rate": 1.113244700713063e-05, |
|
"loss": 0.8697, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.8645919208573783, |
|
"grad_norm": 0.39072549944659307, |
|
"learning_rate": 1.0968050555775067e-05, |
|
"loss": 1.0164, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 0.8656224237427865, |
|
"grad_norm": 0.4998393287633088, |
|
"learning_rate": 1.0804806542779223e-05, |
|
"loss": 1.0571, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.8656224237427865, |
|
"eval_loss": 0.9649366140365601, |
|
"eval_runtime": 2477.8624, |
|
"eval_samples_per_second": 3.229, |
|
"eval_steps_per_second": 0.202, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.8666529266281946, |
|
"grad_norm": 0.30161833311373715, |
|
"learning_rate": 1.0642717081189735e-05, |
|
"loss": 0.8134, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 0.8676834295136027, |
|
"grad_norm": 0.3714099835276263, |
|
"learning_rate": 1.0481784269108664e-05, |
|
"loss": 1.2136, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.8687139323990107, |
|
"grad_norm": 0.29858155300200195, |
|
"learning_rate": 1.032201018966621e-05, |
|
"loss": 0.9334, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 0.8697444352844188, |
|
"grad_norm": 0.36365152527356986, |
|
"learning_rate": 1.0163396910993883e-05, |
|
"loss": 1.0167, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.8707749381698269, |
|
"grad_norm": 0.3926164945656461, |
|
"learning_rate": 1.0005946486197648e-05, |
|
"loss": 1.1597, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 0.8718054410552349, |
|
"grad_norm": 0.31631364428210557, |
|
"learning_rate": 9.849660953331363e-06, |
|
"loss": 0.8626, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.872835943940643, |
|
"grad_norm": 0.397012339843304, |
|
"learning_rate": 9.694542335370437e-06, |
|
"loss": 1.1559, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 0.8738664468260511, |
|
"grad_norm": 0.27478860653538173, |
|
"learning_rate": 9.540592640185597e-06, |
|
"loss": 0.9427, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.8748969497114591, |
|
"grad_norm": 0.34552863820949387, |
|
"learning_rate": 9.387813860516915e-06, |
|
"loss": 1.0141, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 0.8759274525968672, |
|
"grad_norm": 0.4001075016221677, |
|
"learning_rate": 9.236207973948063e-06, |
|
"loss": 1.1693, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.8769579554822754, |
|
"grad_norm": 0.3089362146851165, |
|
"learning_rate": 9.085776942880608e-06, |
|
"loss": 0.8552, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 0.8779884583676835, |
|
"grad_norm": 0.3341234310512727, |
|
"learning_rate": 8.936522714508678e-06, |
|
"loss": 1.1562, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.8790189612530915, |
|
"grad_norm": 0.2798328581056808, |
|
"learning_rate": 8.788447220793806e-06, |
|
"loss": 0.99, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 0.8800494641384996, |
|
"grad_norm": 0.39837992045978476, |
|
"learning_rate": 8.641552378439776e-06, |
|
"loss": 1.1073, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.8810799670239077, |
|
"grad_norm": 0.4106276064285754, |
|
"learning_rate": 8.495840088868024e-06, |
|
"loss": 1.1004, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 0.8821104699093157, |
|
"grad_norm": 0.37910153433606, |
|
"learning_rate": 8.351312238192787e-06, |
|
"loss": 0.9667, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.8831409727947238, |
|
"grad_norm": 0.3835389510278448, |
|
"learning_rate": 8.207970697196866e-06, |
|
"loss": 1.1637, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 0.8841714756801319, |
|
"grad_norm": 0.2996106330164154, |
|
"learning_rate": 8.065817321307367e-06, |
|
"loss": 0.8998, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.88520197856554, |
|
"grad_norm": 0.37985003315229504, |
|
"learning_rate": 7.924853950571642e-06, |
|
"loss": 0.9981, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 0.886232481450948, |
|
"grad_norm": 0.3734475653137268, |
|
"learning_rate": 7.78508240963347e-06, |
|
"loss": 1.1901, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.8872629843363561, |
|
"grad_norm": 0.32917519272254847, |
|
"learning_rate": 7.646504507709563e-06, |
|
"loss": 0.8026, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 0.8882934872217643, |
|
"grad_norm": 0.3184623209600695, |
|
"learning_rate": 7.50912203856593e-06, |
|
"loss": 1.1705, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.8893239901071723, |
|
"grad_norm": 0.27564137851589965, |
|
"learning_rate": 7.372936780494877e-06, |
|
"loss": 0.8968, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 0.8903544929925804, |
|
"grad_norm": 0.37744723978180933, |
|
"learning_rate": 7.237950496291856e-06, |
|
"loss": 0.9825, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.8913849958779885, |
|
"grad_norm": 0.3497470810206515, |
|
"learning_rate": 7.104164933232649e-06, |
|
"loss": 1.1498, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 0.8924154987633965, |
|
"grad_norm": 0.314664899690108, |
|
"learning_rate": 6.971581823050832e-06, |
|
"loss": 0.8124, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.8934460016488046, |
|
"grad_norm": 0.3437097611977056, |
|
"learning_rate": 6.840202881915325e-06, |
|
"loss": 1.1555, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 0.8944765045342127, |
|
"grad_norm": 0.28333067118500105, |
|
"learning_rate": 6.710029810408092e-06, |
|
"loss": 0.899, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.8955070074196207, |
|
"grad_norm": 0.39572557163512656, |
|
"learning_rate": 6.581064293502293e-06, |
|
"loss": 1.0465, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 0.8965375103050288, |
|
"grad_norm": 0.37753981586149277, |
|
"learning_rate": 6.453308000540304e-06, |
|
"loss": 1.1223, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.8975680131904369, |
|
"grad_norm": 0.33675472826608077, |
|
"learning_rate": 6.326762585212209e-06, |
|
"loss": 0.8248, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 0.8985985160758451, |
|
"grad_norm": 0.3825197592387351, |
|
"learning_rate": 6.20142968553441e-06, |
|
"loss": 1.1401, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.8996290189612531, |
|
"grad_norm": 0.2838180681849475, |
|
"learning_rate": 6.077310923828328e-06, |
|
"loss": 0.909, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 0.9006595218466612, |
|
"grad_norm": 0.37686082536565757, |
|
"learning_rate": 5.954407906699511e-06, |
|
"loss": 1.0217, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.9016900247320693, |
|
"grad_norm": 0.3380783448392427, |
|
"learning_rate": 5.8327222250167735e-06, |
|
"loss": 1.1523, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 0.9027205276174773, |
|
"grad_norm": 0.2883596670864561, |
|
"learning_rate": 5.71225545389158e-06, |
|
"loss": 0.8144, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.9037510305028854, |
|
"grad_norm": 0.3483906272756692, |
|
"learning_rate": 5.59300915265778e-06, |
|
"loss": 1.1855, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 0.9047815333882935, |
|
"grad_norm": 0.2639750686093266, |
|
"learning_rate": 5.4749848648512624e-06, |
|
"loss": 0.9381, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.9058120362737015, |
|
"grad_norm": 0.3972431149683815, |
|
"learning_rate": 5.358184118190068e-06, |
|
"loss": 1.0683, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 0.9068425391591096, |
|
"grad_norm": 0.35801168305170344, |
|
"learning_rate": 5.242608424554651e-06, |
|
"loss": 1.0903, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.9068425391591096, |
|
"eval_loss": 0.9635400176048279, |
|
"eval_runtime": 2476.6467, |
|
"eval_samples_per_second": 3.23, |
|
"eval_steps_per_second": 0.202, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.9078730420445177, |
|
"grad_norm": 0.33090712708760794, |
|
"learning_rate": 5.128259279968195e-06, |
|
"loss": 0.8644, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 0.9089035449299258, |
|
"grad_norm": 0.35066225756340424, |
|
"learning_rate": 5.01513816457736e-06, |
|
"loss": 1.1482, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.9099340478153339, |
|
"grad_norm": 0.2777858386287618, |
|
"learning_rate": 4.903246542633033e-06, |
|
"loss": 0.9529, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 0.910964550700742, |
|
"grad_norm": 0.3807172458090473, |
|
"learning_rate": 4.7925858624714215e-06, |
|
"loss": 1.0499, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.9119950535861501, |
|
"grad_norm": 0.36543715138130545, |
|
"learning_rate": 4.683157556495343e-06, |
|
"loss": 1.1883, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 0.9130255564715581, |
|
"grad_norm": 0.3339553261899812, |
|
"learning_rate": 4.574963041155622e-06, |
|
"loss": 0.8319, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.9140560593569662, |
|
"grad_norm": 0.3202714496531148, |
|
"learning_rate": 4.468003716932734e-06, |
|
"loss": 1.1678, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 0.9150865622423743, |
|
"grad_norm": 0.27715040842554794, |
|
"learning_rate": 4.362280968318777e-06, |
|
"loss": 0.9364, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.9161170651277823, |
|
"grad_norm": 0.3628969851723974, |
|
"learning_rate": 4.257796163799455e-06, |
|
"loss": 1.0704, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 0.9171475680131904, |
|
"grad_norm": 0.4255339930477384, |
|
"learning_rate": 4.154550655836409e-06, |
|
"loss": 1.1622, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.9181780708985985, |
|
"grad_norm": 0.3098446765156209, |
|
"learning_rate": 4.052545780849715e-06, |
|
"loss": 0.8363, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 0.9192085737840066, |
|
"grad_norm": 0.38626454996818416, |
|
"learning_rate": 3.9517828592005475e-06, |
|
"loss": 1.1148, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.9202390766694146, |
|
"grad_norm": 0.2786681923266397, |
|
"learning_rate": 3.852263195174155e-06, |
|
"loss": 0.8743, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 0.9212695795548228, |
|
"grad_norm": 0.3627972033581097, |
|
"learning_rate": 3.7539880769628998e-06, |
|
"loss": 1.0305, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.9223000824402309, |
|
"grad_norm": 0.3545740104488335, |
|
"learning_rate": 3.6569587766496216e-06, |
|
"loss": 1.1188, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 0.9233305853256389, |
|
"grad_norm": 0.30405956916616017, |
|
"learning_rate": 3.561176550191203e-06, |
|
"loss": 0.7902, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.924361088211047, |
|
"grad_norm": 0.40805109311840626, |
|
"learning_rate": 3.46664263740224e-06, |
|
"loss": 1.0949, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 0.9253915910964551, |
|
"grad_norm": 0.2937050314911077, |
|
"learning_rate": 3.3733582619390523e-06, |
|
"loss": 0.9339, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.9264220939818631, |
|
"grad_norm": 0.37795976499150646, |
|
"learning_rate": 3.281324631283833e-06, |
|
"loss": 1.0108, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 0.9274525968672712, |
|
"grad_norm": 0.4168578135164906, |
|
"learning_rate": 3.1905429367289795e-06, |
|
"loss": 1.1267, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.9284830997526793, |
|
"grad_norm": 0.30324094735934326, |
|
"learning_rate": 3.101014353361753e-06, |
|
"loss": 0.8109, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 0.9295136026380874, |
|
"grad_norm": 0.3929210483289662, |
|
"learning_rate": 3.012740040048978e-06, |
|
"loss": 1.1848, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.9305441055234954, |
|
"grad_norm": 0.2889829253469363, |
|
"learning_rate": 2.9257211394220773e-06, |
|
"loss": 0.8944, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 0.9315746084089035, |
|
"grad_norm": 0.39922043302261206, |
|
"learning_rate": 2.8399587778623505e-06, |
|
"loss": 1.0599, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.9326051112943117, |
|
"grad_norm": 0.41401354380939953, |
|
"learning_rate": 2.755454065486263e-06, |
|
"loss": 1.1731, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 0.9336356141797197, |
|
"grad_norm": 0.3816124740915107, |
|
"learning_rate": 2.672208096131157e-06, |
|
"loss": 0.9285, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.9346661170651278, |
|
"grad_norm": 0.365839138684449, |
|
"learning_rate": 2.5902219473411204e-06, |
|
"loss": 1.1613, |
|
"step": 4535 |
|
}, |
|
{ |
|
"epoch": 0.9356966199505359, |
|
"grad_norm": 0.30150334355809666, |
|
"learning_rate": 2.509496680352963e-06, |
|
"loss": 0.9157, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.936727122835944, |
|
"grad_norm": 0.36964220055921937, |
|
"learning_rate": 2.430033340082516e-06, |
|
"loss": 1.0569, |
|
"step": 4545 |
|
}, |
|
{ |
|
"epoch": 0.937757625721352, |
|
"grad_norm": 0.41058988578726896, |
|
"learning_rate": 2.3518329551111217e-06, |
|
"loss": 1.1936, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.9387881286067601, |
|
"grad_norm": 0.32143724004908797, |
|
"learning_rate": 2.2748965376723e-06, |
|
"loss": 0.8617, |
|
"step": 4555 |
|
}, |
|
{ |
|
"epoch": 0.9398186314921682, |
|
"grad_norm": 0.3695703528509068, |
|
"learning_rate": 2.199225083638656e-06, |
|
"loss": 1.2086, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.9408491343775762, |
|
"grad_norm": 0.28544602650039125, |
|
"learning_rate": 2.1248195725089624e-06, |
|
"loss": 0.9673, |
|
"step": 4565 |
|
}, |
|
{ |
|
"epoch": 0.9418796372629843, |
|
"grad_norm": 0.33475042033286156, |
|
"learning_rate": 2.0516809673955083e-06, |
|
"loss": 1.0888, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.9429101401483924, |
|
"grad_norm": 0.40597870855708795, |
|
"learning_rate": 1.9798102150116573e-06, |
|
"loss": 1.1073, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 0.9439406430338005, |
|
"grad_norm": 0.35068928153409934, |
|
"learning_rate": 1.909208245659522e-06, |
|
"loss": 0.8919, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.9449711459192086, |
|
"grad_norm": 0.3520873665570541, |
|
"learning_rate": 1.8398759732179637e-06, |
|
"loss": 1.1762, |
|
"step": 4585 |
|
}, |
|
{ |
|
"epoch": 0.9460016488046167, |
|
"grad_norm": 0.31075650924614495, |
|
"learning_rate": 1.7718142951307914e-06, |
|
"loss": 0.8997, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.9470321516900247, |
|
"grad_norm": 0.3520700228079484, |
|
"learning_rate": 1.705024092395091e-06, |
|
"loss": 1.0955, |
|
"step": 4595 |
|
}, |
|
{ |
|
"epoch": 0.9480626545754328, |
|
"grad_norm": 0.3680157698495691, |
|
"learning_rate": 1.6395062295498698e-06, |
|
"loss": 1.178, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.9480626545754328, |
|
"eval_loss": 0.9628809690475464, |
|
"eval_runtime": 2669.1038, |
|
"eval_samples_per_second": 2.997, |
|
"eval_steps_per_second": 0.187, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.9490931574608409, |
|
"grad_norm": 0.31126006459380184, |
|
"learning_rate": 1.5752615546647975e-06, |
|
"loss": 0.8694, |
|
"step": 4605 |
|
}, |
|
{ |
|
"epoch": 0.950123660346249, |
|
"grad_norm": 0.34126438848006335, |
|
"learning_rate": 1.5122908993293273e-06, |
|
"loss": 1.1496, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.951154163231657, |
|
"grad_norm": 0.28191122396530277, |
|
"learning_rate": 1.4505950786418255e-06, |
|
"loss": 0.9136, |
|
"step": 4615 |
|
}, |
|
{ |
|
"epoch": 0.9521846661170651, |
|
"grad_norm": 0.3757522319369373, |
|
"learning_rate": 1.3901748911991253e-06, |
|
"loss": 1.0589, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.9532151690024732, |
|
"grad_norm": 0.3812139506827204, |
|
"learning_rate": 1.331031119086079e-06, |
|
"loss": 1.1577, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 0.9542456718878813, |
|
"grad_norm": 0.3285095663923165, |
|
"learning_rate": 1.2731645278655445e-06, |
|
"loss": 0.9256, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.9552761747732894, |
|
"grad_norm": 0.3434798919525598, |
|
"learning_rate": 1.2165758665683924e-06, |
|
"loss": 1.1373, |
|
"step": 4635 |
|
}, |
|
{ |
|
"epoch": 0.9563066776586975, |
|
"grad_norm": 0.3017676114908958, |
|
"learning_rate": 1.1612658676838473e-06, |
|
"loss": 0.8882, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.9573371805441055, |
|
"grad_norm": 0.3795284009217967, |
|
"learning_rate": 1.107235247150018e-06, |
|
"loss": 1.0639, |
|
"step": 4645 |
|
}, |
|
{ |
|
"epoch": 0.9583676834295136, |
|
"grad_norm": 0.4845153985581749, |
|
"learning_rate": 1.0544847043445938e-06, |
|
"loss": 1.0753, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.9593981863149217, |
|
"grad_norm": 0.32363946255340464, |
|
"learning_rate": 1.0030149220758288e-06, |
|
"loss": 0.8307, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 0.9604286892003298, |
|
"grad_norm": 0.4184299714975136, |
|
"learning_rate": 9.528265665736502e-07, |
|
"loss": 1.1371, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.9614591920857378, |
|
"grad_norm": 0.28512105148343453, |
|
"learning_rate": 9.039202874811192e-07, |
|
"loss": 0.9634, |
|
"step": 4665 |
|
}, |
|
{ |
|
"epoch": 0.9624896949711459, |
|
"grad_norm": 0.360971597617964, |
|
"learning_rate": 8.562967178459391e-07, |
|
"loss": 1.0805, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.963520197856554, |
|
"grad_norm": 0.3822471953295701, |
|
"learning_rate": 8.099564741123166e-07, |
|
"loss": 1.2147, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 0.964550700741962, |
|
"grad_norm": 0.32385746632379775, |
|
"learning_rate": 7.649001561129354e-07, |
|
"loss": 0.8677, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.9655812036273702, |
|
"grad_norm": 0.33325536515100207, |
|
"learning_rate": 7.211283470612395e-07, |
|
"loss": 1.1736, |
|
"step": 4685 |
|
}, |
|
{ |
|
"epoch": 0.9666117065127783, |
|
"grad_norm": 0.2789936033919106, |
|
"learning_rate": 6.786416135438512e-07, |
|
"loss": 0.9563, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.9676422093981863, |
|
"grad_norm": 0.3347741972532808, |
|
"learning_rate": 6.374405055132537e-07, |
|
"loss": 0.9439, |
|
"step": 4695 |
|
}, |
|
{ |
|
"epoch": 0.9686727122835944, |
|
"grad_norm": 0.3903192561742831, |
|
"learning_rate": 5.975255562806647e-07, |
|
"loss": 1.1797, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.9697032151690025, |
|
"grad_norm": 0.32771315967905756, |
|
"learning_rate": 5.58897282509141e-07, |
|
"loss": 0.8953, |
|
"step": 4705 |
|
}, |
|
{ |
|
"epoch": 0.9707337180544106, |
|
"grad_norm": 0.34884009544181055, |
|
"learning_rate": 5.215561842068728e-07, |
|
"loss": 1.1761, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.9717642209398186, |
|
"grad_norm": 0.2775010942276059, |
|
"learning_rate": 4.855027447207338e-07, |
|
"loss": 0.9457, |
|
"step": 4715 |
|
}, |
|
{ |
|
"epoch": 0.9727947238252267, |
|
"grad_norm": 0.382431683738394, |
|
"learning_rate": 4.507374307299972e-07, |
|
"loss": 1.0359, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.9738252267106348, |
|
"grad_norm": 0.4253814777599987, |
|
"learning_rate": 4.172606922403399e-07, |
|
"loss": 1.1746, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 0.9748557295960428, |
|
"grad_norm": 0.3119359666789732, |
|
"learning_rate": 3.8507296257798145e-07, |
|
"loss": 0.8048, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.9758862324814509, |
|
"grad_norm": 0.35641657318401637, |
|
"learning_rate": 3.541746583840655e-07, |
|
"loss": 1.1508, |
|
"step": 4735 |
|
}, |
|
{ |
|
"epoch": 0.9769167353668591, |
|
"grad_norm": 0.28277759207142494, |
|
"learning_rate": 3.24566179609298e-07, |
|
"loss": 0.9406, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.9779472382522671, |
|
"grad_norm": 0.3649978639842664, |
|
"learning_rate": 2.9624790950875113e-07, |
|
"loss": 1.0074, |
|
"step": 4745 |
|
}, |
|
{ |
|
"epoch": 0.9789777411376752, |
|
"grad_norm": 0.43930720949024876, |
|
"learning_rate": 2.692202146369338e-07, |
|
"loss": 1.1133, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.9800082440230833, |
|
"grad_norm": 0.3568172083915493, |
|
"learning_rate": 2.434834448429957e-07, |
|
"loss": 0.8548, |
|
"step": 4755 |
|
}, |
|
{ |
|
"epoch": 0.9810387469084914, |
|
"grad_norm": 0.3512721884204812, |
|
"learning_rate": 2.1903793326621957e-07, |
|
"loss": 1.1658, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.9820692497938994, |
|
"grad_norm": 0.28260427199429944, |
|
"learning_rate": 1.9588399633173605e-07, |
|
"loss": 0.8872, |
|
"step": 4765 |
|
}, |
|
{ |
|
"epoch": 0.9830997526793075, |
|
"grad_norm": 0.3342814004364708, |
|
"learning_rate": 1.740219337463822e-07, |
|
"loss": 1.0368, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.9841302555647156, |
|
"grad_norm": 0.39410468491852235, |
|
"learning_rate": 1.534520284948715e-07, |
|
"loss": 1.1301, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 0.9851607584501236, |
|
"grad_norm": 0.32300550472176315, |
|
"learning_rate": 1.3417454683608554e-07, |
|
"loss": 0.8525, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.9861912613355317, |
|
"grad_norm": 0.38783208707768235, |
|
"learning_rate": 1.1618973829966572e-07, |
|
"loss": 1.2139, |
|
"step": 4785 |
|
}, |
|
{ |
|
"epoch": 0.9872217642209398, |
|
"grad_norm": 0.27657069522605154, |
|
"learning_rate": 9.949783568272697e-08, |
|
"loss": 0.9094, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.988252267106348, |
|
"grad_norm": 0.37764224067778107, |
|
"learning_rate": 8.409905504693782e-08, |
|
"loss": 1.0543, |
|
"step": 4795 |
|
}, |
|
{ |
|
"epoch": 0.989282769991756, |
|
"grad_norm": 0.5230932784753239, |
|
"learning_rate": 6.999359571561171e-08, |
|
"loss": 1.1661, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.989282769991756, |
|
"eval_loss": 0.9627586603164673, |
|
"eval_runtime": 2560.4353, |
|
"eval_samples_per_second": 3.124, |
|
"eval_steps_per_second": 0.195, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.9903132728771641, |
|
"grad_norm": 0.2751657982671328, |
|
"learning_rate": 5.718164027121997e-08, |
|
"loss": 0.7531, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 0.9913437757625722, |
|
"grad_norm": 0.3764599692340869, |
|
"learning_rate": 4.566335455299387e-08, |
|
"loss": 1.1338, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.9923742786479802, |
|
"grad_norm": 0.2992197664538524, |
|
"learning_rate": 3.5438887654737355e-08, |
|
"loss": 0.8897, |
|
"step": 4815 |
|
}, |
|
{ |
|
"epoch": 0.9934047815333883, |
|
"grad_norm": 0.3535257610205098, |
|
"learning_rate": 2.6508371922984166e-08, |
|
"loss": 1.0454, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.9944352844187964, |
|
"grad_norm": 0.37026464651118146, |
|
"learning_rate": 1.887192295521034e-08, |
|
"loss": 1.1346, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 0.9954657873042044, |
|
"grad_norm": 0.3271412553655858, |
|
"learning_rate": 1.252963959834652e-08, |
|
"loss": 0.8182, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.9964962901896125, |
|
"grad_norm": 0.4040096861009905, |
|
"learning_rate": 7.481603947556703e-09, |
|
"loss": 1.0549, |
|
"step": 4835 |
|
}, |
|
{ |
|
"epoch": 0.9975267930750206, |
|
"grad_norm": 0.28077083214803567, |
|
"learning_rate": 3.727881345105821e-09, |
|
"loss": 0.918, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.9985572959604286, |
|
"grad_norm": 0.38213498100295196, |
|
"learning_rate": 1.2685203795492762e-09, |
|
"loss": 1.0451, |
|
"step": 4845 |
|
}, |
|
{ |
|
"epoch": 0.9995877988458368, |
|
"grad_norm": 0.4052030227111039, |
|
"learning_rate": 1.0355288510011107e-10, |
|
"loss": 1.1878, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 4852, |
|
"total_flos": 3.101569705521971e+16, |
|
"train_loss": 1.1041986294158312, |
|
"train_runtime": 123226.4419, |
|
"train_samples_per_second": 0.63, |
|
"train_steps_per_second": 0.039 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 4852, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 200, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.101569705521971e+16, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|