{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999360204734485,
  "eval_steps": 500,
  "global_step": 781,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 11.76060723311326,
      "learning_rate": 2.5316455696202533e-07,
      "loss": 1.6822,
      "step": 1
    },
    {
      "epoch": 0.01,
      "grad_norm": 15.75819587561109,
      "learning_rate": 1.2658227848101267e-06,
      "loss": 1.6642,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 11.381700092209451,
      "learning_rate": 2.5316455696202535e-06,
      "loss": 1.6269,
      "step": 10
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.940372481742168,
      "learning_rate": 3.7974683544303802e-06,
      "loss": 1.4696,
      "step": 15
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.998284936315808,
      "learning_rate": 5.063291139240507e-06,
      "loss": 1.2737,
      "step": 20
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.4930912253553508,
      "learning_rate": 6.329113924050634e-06,
      "loss": 1.2514,
      "step": 25
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.5968427508980252,
      "learning_rate": 7.5949367088607605e-06,
      "loss": 1.2237,
      "step": 30
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.1919266894362968,
      "learning_rate": 8.860759493670886e-06,
      "loss": 1.1799,
      "step": 35
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.207968475527044,
      "learning_rate": 1.0126582278481014e-05,
      "loss": 1.155,
      "step": 40
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.1966812942254237,
      "learning_rate": 1.139240506329114e-05,
      "loss": 1.143,
      "step": 45
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.3885261269338125,
      "learning_rate": 1.2658227848101268e-05,
      "loss": 1.1694,
      "step": 50
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.0658475553487325,
      "learning_rate": 1.3924050632911395e-05,
      "loss": 1.1116,
      "step": 55
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.2195742201568416,
      "learning_rate": 1.5189873417721521e-05,
      "loss": 1.1111,
      "step": 60
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.1281565872746064,
      "learning_rate": 1.6455696202531647e-05,
      "loss": 1.1197,
      "step": 65
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.0428372909471566,
      "learning_rate": 1.7721518987341772e-05,
      "loss": 1.1046,
      "step": 70
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.1098623402342003,
      "learning_rate": 1.89873417721519e-05,
      "loss": 1.0803,
      "step": 75
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.02047373991112,
      "learning_rate": 1.999989986294826e-05,
      "loss": 1.1106,
      "step": 80
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.1133871103761916,
      "learning_rate": 1.9996395276708856e-05,
      "loss": 1.0778,
      "step": 85
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.067392872006611,
      "learning_rate": 1.9987885843187717e-05,
      "loss": 1.0884,
      "step": 90
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.9722703311148946,
      "learning_rate": 1.9974375822762117e-05,
      "loss": 1.0667,
      "step": 95
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.0862367280314877,
      "learning_rate": 1.9955871979429188e-05,
      "loss": 1.0632,
      "step": 100
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.2512571659408036,
      "learning_rate": 1.9932383577419432e-05,
      "loss": 1.0659,
      "step": 105
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.1751369288993785,
      "learning_rate": 1.9903922376558432e-05,
      "loss": 1.0673,
      "step": 110
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.2199342074499273,
      "learning_rate": 1.9870502626379127e-05,
      "loss": 1.0412,
      "step": 115
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.0462278511220546,
      "learning_rate": 1.983214105898757e-05,
      "loss": 1.0417,
      "step": 120
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.1736617165184593,
      "learning_rate": 1.978885688068572e-05,
      "loss": 1.0533,
      "step": 125
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.167836274508835,
      "learning_rate": 1.9740671762355548e-05,
      "loss": 1.043,
      "step": 130
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.9596104398511005,
      "learning_rate": 1.9687609828609156e-05,
      "loss": 1.0659,
      "step": 135
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.089547455726339,
      "learning_rate": 1.9629697645710432e-05,
      "loss": 1.0285,
      "step": 140
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.1072019428376578,
      "learning_rate": 1.9566964208274254e-05,
      "loss": 1.0263,
      "step": 145
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.0354367364165142,
      "learning_rate": 1.9499440924749878e-05,
      "loss": 1.0864,
      "step": 150
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.0814026659538016,
      "learning_rate": 1.9427161601695833e-05,
      "loss": 1.0881,
      "step": 155
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.9888075369780667,
      "learning_rate": 1.9350162426854152e-05,
      "loss": 1.0016,
      "step": 160
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.1029714203666994,
      "learning_rate": 1.926848195103242e-05,
      "loss": 1.0316,
      "step": 165
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.184388866021708,
      "learning_rate": 1.9182161068802742e-05,
      "loss": 1.038,
      "step": 170
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.1210104018767146,
      "learning_rate": 1.909124299802724e-05,
      "loss": 1.03,
      "step": 175
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.1599379220601473,
      "learning_rate": 1.8995773258220374e-05,
      "loss": 1.0362,
      "step": 180
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.9505042533934197,
      "learning_rate": 1.8895799647758912e-05,
      "loss": 1.0465,
      "step": 185
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.055130356572991,
      "learning_rate": 1.879137221995095e-05,
      "loss": 1.0302,
      "step": 190
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.188221794161717,
      "learning_rate": 1.868254325797594e-05,
      "loss": 1.0449,
      "step": 195
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.24255219114766,
      "learning_rate": 1.8569367248708343e-05,
      "loss": 1.0453,
      "step": 200
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.9610325493923482,
      "learning_rate": 1.845190085543795e-05,
      "loss": 1.0736,
      "step": 205
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.213521279472012,
      "learning_rate": 1.8330202889500518e-05,
      "loss": 1.07,
      "step": 210
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.0901868382455668,
      "learning_rate": 1.8204334280833005e-05,
      "loss": 1.0185,
      "step": 215
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.0577550693821212,
      "learning_rate": 1.807435804746807e-05,
      "loss": 1.0521,
      "step": 220
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.1921950083382502,
      "learning_rate": 1.7940339263983112e-05,
      "loss": 1.0113,
      "step": 225
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.1451176215596368,
      "learning_rate": 1.7802345028919728e-05,
      "loss": 1.0366,
      "step": 230
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.039996713908348,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.9962,
      "step": 235
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.9566229775736538,
      "learning_rate": 1.7514708515485002e-05,
      "loss": 1.0363,
      "step": 240
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.4374877069036986,
      "learning_rate": 1.736521024670737e-05,
      "loss": 1.0319,
      "step": 245
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.02012945474932,
      "learning_rate": 1.7212024473438145e-05,
      "loss": 1.027,
      "step": 250
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.1302434630373548,
      "learning_rate": 1.705522789046377e-05,
      "loss": 1.0065,
      "step": 255
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.9757055931379662,
      "learning_rate": 1.6894899000377462e-05,
      "loss": 1.0352,
      "step": 260
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.049480995976599,
      "learning_rate": 1.67311180742757e-05,
      "loss": 1.0326,
      "step": 265
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.0192736067347892,
      "learning_rate": 1.65639671115693e-05,
      "loss": 0.9999,
      "step": 270
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.1167809018301396,
      "learning_rate": 1.6393529798929103e-05,
      "loss": 1.0055,
      "step": 275
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.0825619824653154,
      "learning_rate": 1.621989146838704e-05,
      "loss": 1.0233,
      "step": 280
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.005770462178686,
      "learning_rate": 1.6043139054613326e-05,
      "loss": 1.0038,
      "step": 285
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.098473406093216,
      "learning_rate": 1.586336105139127e-05,
      "loss": 0.9977,
      "step": 290
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.0490431615668332,
      "learning_rate": 1.568064746731156e-05,
      "loss": 1.0061,
      "step": 295
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.004305030186986,
      "learning_rate": 1.5495089780708062e-05,
      "loss": 0.9927,
      "step": 300
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.0306996734289007,
      "learning_rate": 1.530678089385782e-05,
      "loss": 0.983,
      "step": 305
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.9423273242468014,
      "learning_rate": 1.5115815086468103e-05,
      "loss": 0.9799,
      "step": 310
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.1246961996346894,
      "learning_rate": 1.492228796847385e-05,
      "loss": 1.0154,
      "step": 315
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.0747529887949312,
      "learning_rate": 1.4726296432169095e-05,
      "loss": 1.0162,
      "step": 320
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.1471917674492866,
      "learning_rate": 1.4527938603696376e-05,
      "loss": 1.0363,
      "step": 325
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.0505847496584861,
      "learning_rate": 1.4327313793918362e-05,
      "loss": 0.9627,
      "step": 330
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.0262993815065058,
      "learning_rate": 1.4124522448696407e-05,
      "loss": 1.006,
      "step": 335
    },
    {
      "epoch": 0.44,
      "grad_norm": 1.044795548816788,
      "learning_rate": 1.3919666098600753e-05,
      "loss": 1.0076,
      "step": 340
    },
    {
      "epoch": 0.44,
      "grad_norm": 1.050133490357968,
      "learning_rate": 1.3712847308077737e-05,
      "loss": 1.021,
      "step": 345
    },
    {
      "epoch": 0.45,
      "grad_norm": 1.1313342513320197,
      "learning_rate": 1.350416962409934e-05,
      "loss": 0.9964,
      "step": 350
    },
    {
      "epoch": 0.45,
      "grad_norm": 1.0532665494275932,
      "learning_rate": 1.3293737524320798e-05,
      "loss": 1.0263,
      "step": 355
    },
    {
      "epoch": 0.46,
      "grad_norm": 1.1149778014746092,
      "learning_rate": 1.3081656364772308e-05,
      "loss": 0.9959,
      "step": 360
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.991705987820452,
      "learning_rate": 1.2868032327110904e-05,
      "loss": 1.0107,
      "step": 365
    },
    {
      "epoch": 0.47,
      "grad_norm": 1.1090968802518788,
      "learning_rate": 1.2652972365459008e-05,
      "loss": 1.015,
      "step": 370
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.0489892057222994,
      "learning_rate": 1.243658415285622e-05,
      "loss": 0.9843,
      "step": 375
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.1053846068835982,
      "learning_rate": 1.2218976027351177e-05,
      "loss": 0.9901,
      "step": 380
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.0419685500243974,
      "learning_rate": 1.2000256937760446e-05,
      "loss": 0.9755,
      "step": 385
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.9541231393070981,
      "learning_rate": 1.1780536389121668e-05,
      "loss": 0.9945,
      "step": 390
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.0666176053825536,
      "learning_rate": 1.155992438786818e-05,
      "loss": 0.9917,
      "step": 395
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.0564637749960983,
      "learning_rate": 1.1338531386752618e-05,
      "loss": 0.9821,
      "step": 400
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.1667820924248677,
      "learning_rate": 1.1116468229547079e-05,
      "loss": 0.9557,
      "step": 405
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.2270496171271985,
      "learning_rate": 1.0893846095547493e-05,
      "loss": 0.9876,
      "step": 410
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.0509829176213852,
      "learning_rate": 1.0670776443910024e-05,
      "loss": 0.9705,
      "step": 415
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.031556959390347,
      "learning_rate": 1.0447370957847343e-05,
      "loss": 0.9956,
      "step": 420
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.9782347943874674,
      "learning_rate": 1.0223741488712732e-05,
      "loss": 1.0016,
      "step": 425
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.1997141162620377,
      "learning_rate": 1e-05,
      "loss": 0.9926,
      "step": 430
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.0406139514481398,
      "learning_rate": 9.776258511287271e-06,
      "loss": 0.9863,
      "step": 435
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.8886020594570727,
      "learning_rate": 9.55262904215266e-06,
      "loss": 0.974,
      "step": 440
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.0655688286549563,
      "learning_rate": 9.329223556089976e-06,
      "loss": 0.9757,
      "step": 445
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.0923191593800243,
      "learning_rate": 9.10615390445251e-06,
      "loss": 0.9713,
      "step": 450
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.076596518914256,
      "learning_rate": 8.883531770452924e-06,
      "loss": 0.9654,
      "step": 455
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.8916672807364537,
      "learning_rate": 8.661468613247387e-06,
      "loss": 1.0012,
      "step": 460
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.874231994609429,
      "learning_rate": 8.440075612131823e-06,
      "loss": 0.9935,
      "step": 465
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.0720988215903333,
      "learning_rate": 8.219463610878336e-06,
      "loss": 1.0012,
      "step": 470
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.9880422724089176,
      "learning_rate": 7.999743062239557e-06,
      "loss": 0.9978,
      "step": 475
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.1188809925235335,
      "learning_rate": 7.781023972648826e-06,
      "loss": 1.0149,
      "step": 480
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.0471236576275667,
      "learning_rate": 7.563415847143782e-06,
      "loss": 0.9488,
      "step": 485
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.9980906565809108,
      "learning_rate": 7.347027634540993e-06,
      "loss": 0.9909,
      "step": 490
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.0442475148255606,
      "learning_rate": 7.131967672889101e-06,
      "loss": 0.9747,
      "step": 495
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.1002478336061363,
      "learning_rate": 6.918343635227694e-06,
      "loss": 0.9745,
      "step": 500
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.136505991571122,
      "learning_rate": 6.706262475679205e-06,
      "loss": 0.9962,
      "step": 505
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.018960782583497,
      "learning_rate": 6.495830375900665e-06,
      "loss": 0.9551,
      "step": 510
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.1144991763692194,
      "learning_rate": 6.287152691922264e-06,
      "loss": 0.9995,
      "step": 515
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.02967918390761,
      "learning_rate": 6.080333901399252e-06,
      "loss": 0.9789,
      "step": 520
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.0324482375798092,
      "learning_rate": 5.875477551303596e-06,
      "loss": 0.99,
      "step": 525
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.9742298498254184,
      "learning_rate": 5.672686206081638e-06,
      "loss": 0.992,
      "step": 530
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.132108415071919,
      "learning_rate": 5.47206139630363e-06,
      "loss": 1.0074,
      "step": 535
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.0995283960956674,
      "learning_rate": 5.273703567830908e-06,
      "loss": 0.977,
      "step": 540
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.0441789439099798,
      "learning_rate": 5.077712031526153e-06,
      "loss": 1.0189,
      "step": 545
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.981986385580179,
      "learning_rate": 4.8841849135319015e-06,
      "loss": 0.9531,
      "step": 550
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.9434471380481197,
      "learning_rate": 4.693219106142186e-06,
      "loss": 0.9845,
      "step": 555
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.0743211447236112,
      "learning_rate": 4.504910219291941e-06,
      "loss": 0.9544,
      "step": 560
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.9538980158325512,
      "learning_rate": 4.319352532688444e-06,
      "loss": 0.9834,
      "step": 565
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.9654411881427548,
      "learning_rate": 4.13663894860873e-06,
      "loss": 0.9788,
      "step": 570
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.993405896499914,
      "learning_rate": 3.956860945386677e-06,
      "loss": 1.0042,
      "step": 575
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.9704739668364883,
      "learning_rate": 3.7801085316129615e-06,
      "loss": 0.9868,
      "step": 580
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.977366110513439,
      "learning_rate": 3.606470201070904e-06,
      "loss": 0.9468,
      "step": 585
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.9579638698628271,
      "learning_rate": 3.4360328884307058e-06,
      "loss": 0.9635,
      "step": 590
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.106753292275808,
      "learning_rate": 3.2688819257242963e-06,
      "loss": 0.9617,
      "step": 595
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.1425286371389305,
      "learning_rate": 3.1051009996225434e-06,
      "loss": 0.9692,
      "step": 600
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.1364557782660365,
      "learning_rate": 2.9447721095362325e-06,
      "loss": 0.9744,
      "step": 605
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.9406598524967239,
      "learning_rate": 2.7879755265618558e-06,
      "loss": 0.9925,
      "step": 610
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.0664629358580562,
      "learning_rate": 2.6347897532926293e-06,
      "loss": 0.9588,
      "step": 615
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.0139901204102686,
      "learning_rate": 2.485291484515e-06,
      "loss": 0.9769,
      "step": 620
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.8650137198761167,
      "learning_rate": 2.339555568810221e-06,
      "loss": 0.9957,
      "step": 625
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.0370421826033083,
      "learning_rate": 2.1976549710802754e-06,
      "loss": 0.992,
      "step": 630
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.0397841541534047,
      "learning_rate": 2.0596607360168897e-06,
      "loss": 0.9804,
      "step": 635
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.2468139771235325,
      "learning_rate": 1.9256419525319316e-06,
      "loss": 0.9528,
      "step": 640
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.9006093150032681,
      "learning_rate": 1.7956657191669969e-06,
      "loss": 0.9388,
      "step": 645
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.0317668957030404,
      "learning_rate": 1.6697971104994847e-06,
      "loss": 0.9781,
      "step": 650
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.0387479722065127,
      "learning_rate": 1.5480991445620541e-06,
      "loss": 0.9917,
      "step": 655
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9754842911312599,
      "learning_rate": 1.4306327512916574e-06,
      "loss": 0.9696,
      "step": 660
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.0363013030380983,
      "learning_rate": 1.3174567420240647e-06,
      "loss": 0.9658,
      "step": 665
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8960305658205177,
      "learning_rate": 1.2086277800490554e-06,
      "loss": 0.9595,
      "step": 670
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.179228433134159,
      "learning_rate": 1.1042003522410882e-06,
      "loss": 0.9688,
      "step": 675
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.9413992615815986,
      "learning_rate": 1.0042267417796292e-06,
      "loss": 0.989,
      "step": 680
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.9758806517342423,
      "learning_rate": 9.08757001972762e-07,
      "loss": 0.9629,
      "step": 685
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.1736698700435968,
      "learning_rate": 8.178389311972612e-07,
      "loss": 0.9557,
      "step": 690
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.0617448240010183,
      "learning_rate": 7.315180489675822e-07,
      "loss": 0.9637,
      "step": 695
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.9621374315918411,
      "learning_rate": 6.498375731458529e-07,
      "loss": 0.987,
      "step": 700
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.9896147163255197,
      "learning_rate": 5.728383983041696e-07,
      "loss": 0.9728,
      "step": 705
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.9188541096678722,
      "learning_rate": 5.005590752501244e-07,
      "loss": 1.0125,
      "step": 710
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.99532325879258,
      "learning_rate": 4.3303579172574884e-07,
      "loss": 0.969,
      "step": 715
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.0242423087799273,
      "learning_rate": 3.7030235428956895e-07,
      "loss": 0.9804,
      "step": 720
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.9934143231160297,
      "learning_rate": 3.1239017139084725e-07,
      "loss": 0.9665,
      "step": 725
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.1181836954053048,
      "learning_rate": 2.593282376444539e-07,
      "loss": 0.9763,
      "step": 730
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.092994710465261,
      "learning_rate": 2.11143119314281e-07,
      "loss": 0.9634,
      "step": 735
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.9598027605106688,
      "learning_rate": 1.6785894101243205e-07,
      "loss": 1.0029,
      "step": 740
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.0352713808102656,
      "learning_rate": 1.2949737362087156e-07,
      "loss": 0.9645,
      "step": 745
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9639150803887012,
      "learning_rate": 9.607762344156946e-08,
      "loss": 0.9461,
      "step": 750
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.9466905218851684,
      "learning_rate": 6.761642258056977e-08,
      "loss": 0.9829,
      "step": 755
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.0617099395887875,
      "learning_rate": 4.412802057081278e-08,
      "loss": 0.9796,
      "step": 760
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.020436800278922,
      "learning_rate": 2.5624177237884017e-08,
      "loss": 0.9529,
      "step": 765
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.8998248978192732,
      "learning_rate": 1.2114156812284006e-08,
      "loss": 0.9831,
      "step": 770
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.216227728570802,
      "learning_rate": 3.6047232911462506e-09,
      "loss": 0.985,
      "step": 775
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.9463439407790857,
      "learning_rate": 1.0013705174061195e-10,
      "loss": 0.9774,
      "step": 780
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.9624356627464294,
      "eval_runtime": 1769.185,
      "eval_samples_per_second": 55.649,
      "eval_steps_per_second": 1.739,
      "step": 781
    },
    {
      "epoch": 1.0,
      "step": 781,
      "total_flos": 113044957102080.0,
      "train_loss": 1.0241747791574798,
      "train_runtime": 9230.502,
      "train_samples_per_second": 10.834,
      "train_steps_per_second": 0.085
    }
  ],
  "logging_steps": 5,
  "max_steps": 781,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 113044957102080.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}